| Index: src/arm64/code-stubs-arm64.cc
|
| diff --git a/src/arm64/code-stubs-arm64.cc b/src/arm64/code-stubs-arm64.cc
|
| index e672b9dec8d63c0c4b8ad9b8a66589d961d67dd5..da77ca5d45dd44e2d34b4507d802eb9e4a4fa308 100644
|
| --- a/src/arm64/code-stubs-arm64.cc
|
| +++ b/src/arm64/code-stubs-arm64.cc
|
| @@ -2136,54 +2136,6 @@ void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| -void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
|
| - // x2 : number of parameters (tagged)
|
| - // x3 : parameters pointer
|
| - // x4 : rest parameter index (tagged)
|
| - //
|
| - // Returns pointer to result object in x0.
|
| -
|
| - DCHECK(x2.is(ArgumentsAccessNewDescriptor::parameter_count()));
|
| - DCHECK(x3.is(RestParamAccessDescriptor::parameter_pointer()));
|
| - DCHECK(x4.is(RestParamAccessDescriptor::rest_parameter_index()));
|
| -
|
| - // Get the stub arguments from the frame, and make an untagged copy of the
|
| - // parameter count.
|
| - Register rest_index_smi = x4;
|
| - Register param_count_smi = x2;
|
| - Register params = x3;
|
| - Register param_count = x13;
|
| - __ SmiUntag(param_count, param_count_smi);
|
| -
|
| - // Test if arguments adaptor needed.
|
| - Register caller_fp = x11;
|
| - Register caller_ctx = x12;
|
| - Label runtime;
|
| - __ Ldr(caller_fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
|
| - __ Ldr(caller_ctx,
|
| - MemOperand(caller_fp, StandardFrameConstants::kContextOffset));
|
| - __ Cmp(caller_ctx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
|
| - __ B(ne, &runtime);
|
| -
|
| - // x4 rest_index_smi index of rest parameter
|
| - // x2 param_count_smi number of parameters passed to function (smi)
|
| - // x3 params pointer to parameters
|
| - // x11 caller_fp caller's frame pointer
|
| - // x13 param_count number of parameters passed to function
|
| -
|
| - // Patch the argument length and parameters pointer.
|
| - __ Ldr(param_count_smi,
|
| - MemOperand(caller_fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
|
| - __ SmiUntag(param_count, param_count_smi);
|
| - __ Add(x10, caller_fp, Operand(param_count, LSL, kPointerSizeLog2));
|
| - __ Add(params, x10, StandardFrameConstants::kCallerSPOffset);
|
| -
|
| - __ Bind(&runtime);
|
| - __ Push(param_count_smi, params, rest_index_smi);
|
| - __ TailCallRuntime(Runtime::kNewRestParam);
|
| -}
|
| -
|
| -
|
| void RegExpExecStub::Generate(MacroAssembler* masm) {
|
| #ifdef V8_INTERPRETED_REGEXP
|
| __ TailCallRuntime(Runtime::kRegExpExec);
|
| @@ -5392,6 +5344,153 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| +void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
|
| + // ----------- S t a t e -------------
|
| + // -- x1 : function
|
| + // -- cp : context
|
| + // -- fp : frame pointer
|
| + // -- lr : return address
|
| + // -----------------------------------
|
| + __ AssertFunction(x1);
|
| +
|
| + // For Ignition we need to skip all possible handler/stub frames until
|
| + // we reach the JavaScript frame for the function (similar to what the
|
| + // runtime fallback implementation does). So make x2 point to that
|
| + // JavaScript frame.
|
| + {
|
| + Label loop, loop_entry;
|
| + __ Mov(x2, fp);
|
| + __ B(&loop_entry);
|
| + __ Bind(&loop);
|
| + __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
|
| + __ Bind(&loop_entry);
|
| + __ Ldr(x3, MemOperand(x2, StandardFrameConstants::kMarkerOffset));
|
| + __ Cmp(x3, x1);
|
| + __ B(ne, &loop);
|
| + }
|
| +
|
| + // Check if we have rest parameters (only possible if we have an
|
| + // arguments adaptor frame below the function frame).
|
| + Label no_rest_parameters;
|
| + __ Ldr(x2, MemOperand(x2, StandardFrameConstants::kCallerFPOffset));
|
| + __ Ldr(x3, MemOperand(x2, StandardFrameConstants::kContextOffset));
|
| + __ Cmp(x3, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
|
| + __ B(ne, &no_rest_parameters);
|
| +
|
| + // Check if the arguments adaptor frame contains more arguments than
|
| + // specified by the function's internal formal parameter count.
|
| + Label rest_parameters;
|
| + __ Ldrsw(x0, UntagSmiMemOperand(
|
| + x2, ArgumentsAdaptorFrameConstants::kLengthOffset));
|
| + __ Ldr(x1, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
|
| + __ Ldrsw(
|
| + x1, FieldMemOperand(x1, SharedFunctionInfo::kFormalParameterCountOffset));
|
| + __ Subs(x0, x0, x1);
|
| + __ B(gt, &rest_parameters);
|
| +
|
| + // Return an empty rest parameter array.
|
| + __ Bind(&no_rest_parameters);
|
| + {
|
| + // ----------- S t a t e -------------
|
| + // -- cp : context
|
| + // -- lr : return address
|
| + // -----------------------------------
|
| +
|
| + // Allocate an empty rest parameter array.
|
| + Label allocate, done_allocate;
|
| + __ Allocate(JSArray::kSize, x0, x1, x2, &allocate, TAG_OBJECT);
|
| + __ Bind(&done_allocate);
|
| +
|
| + // Setup the rest parameter array in x0.
|
| + __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, x1);
|
| + __ Str(x1, FieldMemOperand(x0, JSArray::kMapOffset));
|
| + __ LoadRoot(x1, Heap::kEmptyFixedArrayRootIndex);
|
| + __ Str(x1, FieldMemOperand(x0, JSArray::kPropertiesOffset));
|
| + __ Str(x1, FieldMemOperand(x0, JSArray::kElementsOffset));
|
| + __ Mov(x1, Smi::FromInt(0));
|
| + __ Str(x1, FieldMemOperand(x0, JSArray::kLengthOffset));
|
| + STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
|
| + __ Ret();
|
| +
|
| + // Fall back to %AllocateInNewSpace.
|
| + __ Bind(&allocate);
|
| + {
|
| + FrameScope scope(masm, StackFrame::INTERNAL);
|
| + __ Push(Smi::FromInt(JSArray::kSize));
|
| + __ CallRuntime(Runtime::kAllocateInNewSpace);
|
| + }
|
| + __ B(&done_allocate);
|
| + }
|
| +
|
| + __ Bind(&rest_parameters);
|
| + {
|
| +    // Compute the pointer to the first rest parameter (skipping the receiver).
|
| + __ Add(x2, x2, Operand(x0, LSL, kPointerSizeLog2));
|
| + __ Add(x2, x2, StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize);
|
| +
|
| + // ----------- S t a t e -------------
|
| + // -- cp : context
|
| + // -- x0 : number of rest parameters
|
| +    //  -- x2 : pointer to the first rest parameter
|
| + // -- lr : return address
|
| + // -----------------------------------
|
| +
|
| + // Allocate space for the rest parameter array plus the backing store.
|
| + Label allocate, done_allocate;
|
| + __ Mov(x1, JSArray::kSize + FixedArray::kHeaderSize);
|
| + __ Add(x1, x1, Operand(x0, LSL, kPointerSizeLog2));
|
| + __ Allocate(x1, x3, x4, x5, &allocate, TAG_OBJECT);
|
| + __ Bind(&done_allocate);
|
| +
|
| + // Compute arguments.length in x6.
|
| + __ SmiTag(x6, x0);
|
| +
|
| + // Setup the elements array in x3.
|
| + __ LoadRoot(x1, Heap::kFixedArrayMapRootIndex);
|
| + __ Str(x1, FieldMemOperand(x3, FixedArray::kMapOffset));
|
| + __ Str(x6, FieldMemOperand(x3, FixedArray::kLengthOffset));
|
| + __ Add(x4, x3, FixedArray::kHeaderSize);
|
| + {
|
| + Label loop, done_loop;
|
| + __ Add(x0, x4, Operand(x0, LSL, kPointerSizeLog2));
|
| + __ Bind(&loop);
|
| + __ Cmp(x4, x0);
|
| + __ B(eq, &done_loop);
|
| + __ Ldr(x5, MemOperand(x2, 0 * kPointerSize));
|
| + __ Str(x5, FieldMemOperand(x4, 0 * kPointerSize));
|
| + __ Sub(x2, x2, Operand(1 * kPointerSize));
|
| + __ Add(x4, x4, Operand(1 * kPointerSize));
|
| + __ B(&loop);
|
| + __ Bind(&done_loop);
|
| + }
|
| +
|
| + // Setup the rest parameter array in x0.
|
| + __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, x1);
|
| + __ Str(x1, FieldMemOperand(x0, JSArray::kMapOffset));
|
| + __ LoadRoot(x1, Heap::kEmptyFixedArrayRootIndex);
|
| + __ Str(x1, FieldMemOperand(x0, JSArray::kPropertiesOffset));
|
| + __ Str(x3, FieldMemOperand(x0, JSArray::kElementsOffset));
|
| + __ Str(x6, FieldMemOperand(x0, JSArray::kLengthOffset));
|
| + STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
|
| + __ Ret();
|
| +
|
| + // Fall back to %AllocateInNewSpace.
|
| + __ Bind(&allocate);
|
| + {
|
| + FrameScope scope(masm, StackFrame::INTERNAL);
|
| + __ SmiTag(x0);
|
| + __ SmiTag(x1);
|
| + __ Push(x0, x2, x1);
|
| + __ CallRuntime(Runtime::kAllocateInNewSpace);
|
| + __ Mov(x3, x0);
|
| + __ Pop(x2, x0);
|
| + __ SmiUntag(x0);
|
| + }
|
| + __ B(&done_allocate);
|
| + }
|
| +}
|
| +
|
| +
|
| void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
|
| Register context = cp;
|
| Register result = x0;
|
|
|