| Index: src/arm64/builtins-arm64.cc
|
| diff --git a/src/arm64/builtins-arm64.cc b/src/arm64/builtins-arm64.cc
|
| index dfb59c0504dacb1f3ffa5d7a616d6665bb8581db..97ccf196580b25c15e2e7ab6587652f25065effc 100644
|
| --- a/src/arm64/builtins-arm64.cc
|
| +++ b/src/arm64/builtins-arm64.cc
|
| @@ -799,6 +799,46 @@ void Builtins::Generate_JSConstructStubForDerived(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| +enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
|
| +
|
| +
|
| +// Clobbers x10, x15; preserves all other registers.
|
| +static void Generate_CheckStackOverflow(MacroAssembler* masm,
|
| + const int calleeOffset, Register argc,
|
| + IsTagged argc_is_tagged) {
|
| + Register function = x15;
|
| +
|
| + // Check the stack for overflow.
|
| + // We are not trying to catch interruptions (e.g. debug break and
|
| + // preemption) here, so the "real stack limit" is checked.
|
| + Label enough_stack_space;
|
| + __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
|
| + __ Ldr(function, MemOperand(fp, calleeOffset));
|
| + // Make x10 the space we have left. The stack might already be overflowed
|
| + // here which will cause x10 to become negative.
|
| + // TODO(jbramley): Check that the stack usage here is safe.
|
| + __ Sub(x10, jssp, x10);
|
| + // Check if the arguments will overflow the stack.
|
| + if (argc_is_tagged == kArgcIsSmiTagged) {
|
| + __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
|
| + } else {
|
| + DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
|
| + __ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
|
| + }
|
| + __ B(gt, &enough_stack_space);
|
| + // There is not enough stack space, so use a builtin to throw an appropriate
|
| + // error.
|
| + __ Push(function, argc);
|
| + __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
|
| + // We should never return from the STACK_OVERFLOW builtin.
|
| + if (__ emit_debug_code()) {
|
| + __ Unreachable();
|
| + }
|
| +
|
| + __ Bind(&enough_stack_space);
|
| +}
|
| +
|
| +
|
| // Input:
|
| // x0: code entry.
|
| // x1: function.
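The comparison this helper emits reduces to "signed bytes left on the JS stack" versus "bytes the pending arguments will occupy". The sketch below restates that arithmetic in plain C++ for reference only; the real check runs as generated arm64 instructions against jssp and the real-stack-limit root, and the kPointerSizeLog2/kSmiShift values are the usual 64-bit assumptions rather than constants taken from this file.

    #include <cstdint>

    enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };

    // Assumed 64-bit layout: 8-byte pointers, Smi value in the upper 32 bits.
    constexpr int kPointerSizeLog2 = 3;
    constexpr int kSmiShift = 32;

    // Mirrors the Sub/Cmp/B(gt) sequence above: true when pushing argc
    // arguments still leaves us above the real stack limit.
    bool HasEnoughStackSpace(uint64_t jssp, uint64_t real_stack_limit,
                             int64_t argc, IsTagged argc_is_tagged) {
      // x10 <- jssp - limit; signed, so an already-overflowed stack shows up
      // as a negative value, as the comment in the helper notes.
      int64_t space_left = static_cast<int64_t>(jssp - real_stack_limit);
      int64_t bytes_needed =
          (argc_is_tagged == kArgcIsSmiTagged)
              ? (argc >> kSmiShift) << kPointerSizeLog2  // untag, then scale
              : argc << kPointerSizeLog2;                // plain LSL
      return space_left > bytes_needed;  // B(gt, &enough_stack_space)
    }

If the condition fails, the generated code pushes the function and argc and calls into Builtins::STACK_OVERFLOW, which throws rather than returning.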
|
| @@ -832,6 +872,15 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
|
| // Push the function and the receiver onto the stack.
|
| __ Push(function, receiver);
|
|
|
| + // Check if we have enough stack space to push all arguments.
|
| + // The function is the first thing that was pushed above after entering
|
| + // the internal frame.
|
| + const int kFunctionOffset =
|
| + InternalFrameConstants::kCodeOffset - kPointerSize;
|
| + // Expects the argument count in argc (x3). Clobbers x10 and x15.
|
| + Generate_CheckStackOverflow(masm, kFunctionOffset, argc,
|
| + kArgcIsUntaggedInt);
|
| +
|
| // Copy arguments to the stack in a loop, in reverse order.
|
| // x3: argc.
|
| // x4: argv.
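The check has to run before the copy loop that the context above introduces (x3 holds argc, x4 holds argv), because that loop consumes one pointer-sized JS-stack slot per argument, which is exactly the argc << kPointerSizeLog2 budget the helper compares against. A conceptual C++ sketch of that relationship, illustrative only and not the generated code: Object is a stand-in for the tagged values the real loop loads through the argv handles, and jssp is modelled as a plain pointer.

    #include <cstdint>

    using Object = uint64_t;  // stand-in for a tagged pointer value

    // Conceptual version of the copy loop (iteration order is illustrative):
    // argv holds handles, and every argument takes exactly one pointer-sized
    // slot below the JS stack pointer, the budget the preceding check reserves.
    void PushArguments(Object** argv, int argc, Object*& jssp) {
      for (int i = 0; i < argc; ++i) {
        *(--jssp) = *argv[i];  // dereference the handle, push the value
      }
    }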
|
| @@ -1324,37 +1373,6 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| -static void Generate_CheckStackOverflow(MacroAssembler* masm,
|
| - const int calleeOffset) {
|
| - Register argc = x0;
|
| - Register function = x15;
|
| -
|
| - // Check the stack for overflow.
|
| - // We are not trying to catch interruptions (e.g. debug break and
|
| - // preemption) here, so the "real stack limit" is checked.
|
| - Label enough_stack_space;
|
| - __ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
|
| - __ Ldr(function, MemOperand(fp, calleeOffset));
|
| - // Make x10 the space we have left. The stack might already be overflowed
|
| - // here which will cause x10 to become negative.
|
| - // TODO(jbramley): Check that the stack usage here is safe.
|
| - __ Sub(x10, jssp, x10);
|
| - // Check if the arguments will overflow the stack.
|
| - __ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
|
| - __ B(gt, &enough_stack_space);
|
| - // There is not enough stack space, so use a builtin to throw an appropriate
|
| - // error.
|
| - __ Push(function, argc);
|
| - __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
|
| - // We should never return from the APPLY_OVERFLOW builtin.
|
| - if (__ emit_debug_code()) {
|
| - __ Unreachable();
|
| - }
|
| -
|
| - __ Bind(&enough_stack_space);
|
| -}
|
| -
|
| -
|
| static void Generate_PushAppliedArguments(MacroAssembler* masm,
|
| const int argumentsOffset,
|
| const int indexOffset,
|
| @@ -1422,7 +1440,7 @@ static void Generate_ApplyHelper(MacroAssembler* masm, bool targetIsArgument) {
|
| }
|
| Register argc = x0;
|
|
|
| - Generate_CheckStackOverflow(masm, kFunctionOffset);
|
| + Generate_CheckStackOverflow(masm, kFunctionOffset, argc, kArgcIsSmiTagged);
|
|
|
| // Push current limit and index.
|
| __ Mov(x1, 0); // Initial index.
|
| @@ -1549,7 +1567,7 @@ static void Generate_ConstructHelper(MacroAssembler* masm) {
|
| __ InvokeBuiltin(Builtins::REFLECT_CONSTRUCT_PREPARE, CALL_FUNCTION);
|
| Register argc = x0;
|
|
|
| - Generate_CheckStackOverflow(masm, kFunctionOffset);
|
| + Generate_CheckStackOverflow(masm, kFunctionOffset, argc, kArgcIsSmiTagged);
|
|
|
| // Push current limit and index, constructor & newTarget
|
| __ Mov(x1, 0); // Initial index.
|
|
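In Generate_ApplyHelper and Generate_ConstructHelper the argument count arrives as a Smi, so both call sites pass kArgcIsSmiTagged and the helper compares against Operand::UntagSmiAndScale(argc, kPointerSizeLog2) instead of a plain LSL. Assuming the usual arm64 Smi layout (integer value in the upper 32 bits, kSmiShift == 32), untagging and scaling collapse into one arithmetic right shift; a small sketch of that equivalence:

    #include <cassert>
    #include <cstdint>

    constexpr int kSmiShift = 32;        // assumption: 32-bit Smi values on arm64
    constexpr int kPointerSizeLog2 = 3;  // assumption: 8-byte pointers

    // Untagged argc: bytes needed is a plain shift, matching the LSL operand.
    int64_t BytesNeededUntagged(int64_t argc) { return argc << kPointerSizeLog2; }

    // Smi-tagged argc: untag (>> kSmiShift) then scale (<< kPointerSizeLog2)
    // fold into a single arithmetic shift right, which is what the
    // UntagSmiAndScale operand expresses.
    int64_t BytesNeededSmi(int64_t tagged_argc) {
      return tagged_argc >> (kSmiShift - kPointerSizeLog2);
    }

    int main() {
      int64_t argc = 5;
      int64_t smi_argc = argc << kSmiShift;  // tag 5 as a Smi
      assert(BytesNeededSmi(smi_argc) == BytesNeededUntagged(argc));  // 40 bytes each
      return 0;
    }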
|