Index: src/builtins/x64/builtins-x64.cc
diff --git a/src/builtins/x64/builtins-x64.cc b/src/builtins/x64/builtins-x64.cc
index b232077e6584f71ce99ee47d18d00b74b8bed61e..58d38cf138a6236728f3a6fc9e42f971d98537c3 100644
--- a/src/builtins/x64/builtins-x64.cc
+++ b/src/builtins/x64/builtins-x64.cc
@@ -765,7 +765,7 @@ static void Generate_InterpreterPushArgs(MacroAssembler* masm,
 // static
 void Builtins::Generate_InterpreterPushArgsAndCallImpl(
     MacroAssembler* masm, TailCallMode tail_call_mode,
-    CallableType function_type) {
+    InterpreterPushArgsMode mode) {
   // ----------- S t a t e -------------
   //  -- rax : the number of arguments (not including the receiver)
   //  -- rbx : the address of the first argument to be pushed. Subsequent
@@ -791,12 +791,14 @@ void Builtins::Generate_InterpreterPushArgsAndCallImpl(
 
   // Call the target.
   __ PushReturnAddressFrom(kScratchRegister);  // Re-push return address.
-  if (function_type == CallableType::kJSFunction) {
+  if (mode == InterpreterPushArgsMode::kJSFunction) {
     __ Jump(masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny,
                                                       tail_call_mode),
             RelocInfo::CODE_TARGET);
+  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
+    __ Jump(masm->isolate()->builtins()->CallWithSpread(),
+            RelocInfo::CODE_TARGET);
   } else {
-    DCHECK_EQ(function_type, CallableType::kAny);
     __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
                                               tail_call_mode),
             RelocInfo::CODE_TARGET);
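
Context for the hunk above: the old CallableType (call path) and PushArgsConstructMode (construct path) parameters are folded into a single mode enum shared by both interpreter push-args builtins, and the new kWithFinalSpread case routes calls like f(a, ...rest) to the CallWithSpread builtin added further down. A minimal sketch of the assumed declaration (the real one lives in the shared builtins header, not in this file, and may differ in detail):

    // Hedged sketch only; the actual enum is declared elsewhere in the CL.
    enum class InterpreterPushArgsMode {
      kJSFunction,       // callee statically known to be a JSFunction
      kWithFinalSpread,  // the last argument is a spread, e.g. f(a, ...rest)
      kOther             // any other callable
    };
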
@@ -813,7 +815,7 @@ void Builtins::Generate_InterpreterPushArgsAndCallImpl(
 
 // static
 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
-    MacroAssembler* masm, PushArgsConstructMode mode) {
+    MacroAssembler* masm, InterpreterPushArgsMode mode) {
   // ----------- S t a t e -------------
   //  -- rax : the number of arguments (not including the receiver)
   //  -- rdx : the new target (either the same as the constructor or
@@ -842,7 +844,7 @@ void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
   __ PushReturnAddressFrom(kScratchRegister);
 
   __ AssertUndefinedOrAllocationSite(rbx);
-  if (mode == PushArgsConstructMode::kJSFunction) {
+  if (mode == InterpreterPushArgsMode::kJSFunction) {
     // Tail call to the function-specific construct stub (still in the caller
     // context at this point).
     __ AssertFunction(rdi);
@@ -852,12 +854,12 @@ void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
     __ leap(rcx, FieldOperand(rcx, Code::kHeaderSize));
     // Jump to the constructor function (rax, rbx, rdx passed on).
     __ jmp(rcx);
-  } else if (mode == PushArgsConstructMode::kWithFinalSpread) {
+  } else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
     // Call the constructor (rax, rdx, rdi passed on).
     __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
             RelocInfo::CODE_TARGET);
   } else {
-    DCHECK_EQ(PushArgsConstructMode::kOther, mode);
+    DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
     // Call the constructor (rax, rdx, rdi passed on).
     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   }
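
A note on the kJSFunction branch above: the tail call jumps to the function-specific construct stub, whose first instruction sits just past the Code object's header. FieldOperand folds in the heap-object tag, so the computed jump target is, roughly (a sketch under the usual V8 tagging assumptions):

    // entry = code_object + Code::kHeaderSize - kHeapObjectTag
    // which is what leap(rcx, FieldOperand(rcx, Code::kHeaderSize))
    // leaves in rcx before the jmp.
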
@@ -2761,6 +2763,143 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
   }
 }
 
+static void CheckSpreadAndPushToStack(MacroAssembler* masm) {
+  // Load the spread argument into rbx. The spread is the last argument, so
+  // it sits immediately above the return address on the stack.
+  __ movp(rbx, Operand(rsp, kPointerSize));
+  // Load the map of the spread into r15.
+  __ movp(r15, FieldOperand(rbx, HeapObject::kMapOffset));
+  // Load native context into r14.
+  __ movp(r14, NativeContextOperand());
+
+  Label runtime_call, push_args;
+  // Check that the spread is an array.
+  __ CmpInstanceType(r15, JS_ARRAY_TYPE);
+  __ j(not_equal, &runtime_call);
+
+  // Check that we have the original ArrayPrototype.
+  __ movp(rcx, FieldOperand(r15, Map::kPrototypeOffset));
+  __ cmpp(rcx, ContextOperand(r14, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
+  __ j(not_equal, &runtime_call);
+
+  // Check that the ArrayPrototype hasn't been modified in a way that would
+  // affect iteration.
+  __ LoadRoot(rcx, Heap::kArrayIteratorProtectorRootIndex);
+  __ Cmp(FieldOperand(rcx, Cell::kValueOffset),
+         Smi::FromInt(Isolate::kProtectorValid));
+  __ j(not_equal, &runtime_call);
+
+  // Check that the map of the initial array iterator hasn't changed.
+  __ movp(rcx,
+          ContextOperand(r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
+  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
+  __ cmpp(rcx, ContextOperand(
+                   r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
+  __ j(not_equal, &runtime_call);
+
+  // For FastPacked kinds, iteration will have the same effect as simply
+  // accessing each property in order.
+  Label no_protector_check;
+  __ movzxbp(rcx, FieldOperand(r15, Map::kBitField2Offset));
+  __ DecodeField<Map::ElementsKindBits>(rcx);
+  __ cmpp(rcx, Immediate(FAST_HOLEY_ELEMENTS));
+  __ j(above, &runtime_call);
+  // For non-FastHoley kinds, we can skip the protector check.
+  __ cmpp(rcx, Immediate(FAST_SMI_ELEMENTS));
+  __ j(equal, &no_protector_check);
+  __ cmpp(rcx, Immediate(FAST_ELEMENTS));
+  __ j(equal, &no_protector_check);
+  // Check the ArrayProtector cell.
+  __ LoadRoot(rcx, Heap::kArrayProtectorRootIndex);
+  __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset),
+         Smi::FromInt(Isolate::kProtectorValid));
+  __ j(not_equal, &runtime_call);
+
+  __ bind(&no_protector_check);
+  // Load the FixedArray backing store, but use the length from the array.
+  __ SmiToInteger32(r9, FieldOperand(rbx, JSArray::kLengthOffset));
+  __ movp(rbx, FieldOperand(rbx, JSArray::kElementsOffset));
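+  // (The elements FixedArray can have excess capacity, so the JSArray length
+  // loaded above, rather than FixedArray::kLengthOffset, is the real element
+  // count on this fast path.)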
+  __ jmp(&push_args);
+
+  __ bind(&runtime_call);
+  {
+    // Call into the runtime to materialize the spread as a FixedArray of
+    // its iterated elements.
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ Push(rdi);  // target
+    __ Push(rdx);  // new target
+    __ Integer32ToSmi(rax, rax);
+    __ Push(rax);  // nargs
+    __ Push(rbx);  // spread
+    __ CallRuntime(Runtime::kSpreadIterableFixed);
+    __ movp(rbx, rax);  // rbx now holds the FixedArray of spread elements.
+    __ Pop(rax);  // nargs
+    __ SmiToInteger32(rax, rax);
+    __ Pop(rdx);  // new target
+    __ Pop(rdi);  // target
+  }
+
+  {
+    // Calculate the new nargs including the result of the spread.
+    __ SmiToInteger32(r9, FieldOperand(rbx, FixedArray::kLengthOffset));
+
+    __ bind(&push_args);
+    // rax += r9 - 1. Subtract 1 for the spread itself.
+    __ leap(rax, Operand(rax, r9, times_1, -1));
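+    // e.g. for f(a, b, ...xs) with four elements in xs: rax == 3 on entry
+    // and r9 == 4, so the final argument count is 3 + 4 - 1 == 6.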
+
+    // Pop the return address and spread argument.
+    __ PopReturnAddressTo(r8);
+    __ Pop(rcx);
+  }
+
+  // Check for stack overflow.
+  {
+    // Check the stack for overflow. We are not trying to catch interruptions
+    // (i.e. debug break and preemption) here, so check the "real stack limit".
+    Label done;
+    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
+    __ movp(rcx, rsp);
+    // Make rcx the space we have left. The stack might already be overflowed
+    // here which will cause rcx to become negative.
+    __ subp(rcx, kScratchRegister);
+    __ sarp(rcx, Immediate(kPointerSizeLog2));
+    // Check if the arguments will overflow the stack.
+    __ cmpp(rcx, r9);
+    __ j(greater, &done, Label::kNear);  // Signed comparison.
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    __ bind(&done);
+  }
+
+  // Put the evaluated spread onto the stack as additional arguments.
+  {
+    __ Set(rcx, 0);
+    Label done, loop;
+    __ bind(&loop);
+    __ cmpl(rcx, r9);
+    __ j(equal, &done, Label::kNear);
+    __ movp(kScratchRegister, FieldOperand(rbx, rcx, times_pointer_size,
+                                           FixedArray::kHeaderSize));
+    __ Push(kScratchRegister);
+    __ incl(rcx);
+    __ jmp(&loop);
+    __ bind(&done);
+    __ PushReturnAddressFrom(r8);
+  }
+}
+
+// static
+void Builtins::Generate_CallWithSpread(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- rax : the number of arguments (not including the receiver)
+  //  -- rdi : the target to call (can be any Object)
+  // -----------------------------------
+
+  // CheckSpreadAndPushToStack will push and pop rdx (the new target slot) on
+  // its runtime path, so it must hold a valid tagged value; a regular call
+  // has no new target, so use undefined.
+  __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
+  CheckSpreadAndPushToStack(masm);
+  __ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny,
+                                            TailCallMode::kDisallow),
+          RelocInfo::CODE_TARGET);
+}
+
 // static
 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   // ----------- S t a t e -------------
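
For readers of CheckSpreadAndPushToStack above: the fast path reads the spread's elements straight out of the JSArray backing store, which is only sound when iterating the array is observably identical to indexed access. A hedged restatement of the guards in C++-flavoured pseudocode (the helper names here are illustrative, not actual V8 API):

    // Illustrative only; mirrors the assembly checks above.
    bool CanExpandSpreadOnFastPath(Map* map) {
      if (map->instance_type() != JS_ARRAY_TYPE) return false;
      if (map->prototype() != initial_array_prototype) return false;
      if (!array_iterator_protector_valid) return false;   // iterator pristine
      if (initial_array_iterator_prototype_map_changed) return false;
      ElementsKind kind = map->elements_kind();
      if (kind > FAST_HOLEY_ELEMENTS) return false;  // doubles, dictionary, ...
      if (kind == FAST_SMI_ELEMENTS || kind == FAST_ELEMENTS) return true;
      // Holey kinds: holes must reliably read as undefined, which requires an
      // untouched prototype chain (tracked by the ArrayProtector cell).
      return array_protector_valid;
    }

Everything else falls through to Runtime::kSpreadIterableFixed, which runs the generic iteration protocol and returns the elements as a FixedArray.
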
@@ -2892,124 +3031,7 @@ void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
   //  -- rdi : the constructor to call (can be any Object)
   // -----------------------------------
 
-  // Load the spread argument into rbx.
-  __ movp(rbx, Operand(rsp, kPointerSize));
-  // Load the map of the spread into r15.
-  __ movp(r15, FieldOperand(rbx, HeapObject::kMapOffset));
-  // Load native context into r14.
-  __ movp(r14, NativeContextOperand());
-
-  Label runtime_call, push_args;
-  // Check that the spread is an array.
-  __ CmpInstanceType(r15, JS_ARRAY_TYPE);
-  __ j(not_equal, &runtime_call);
-
-  // Check that we have the original ArrayPrototype.
-  __ movp(rcx, FieldOperand(r15, Map::kPrototypeOffset));
-  __ cmpp(rcx, ContextOperand(r14, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
-  __ j(not_equal, &runtime_call);
-
-  // Check that the ArrayPrototype hasn't been modified in a way that would
-  // affect iteration.
-  __ LoadRoot(rcx, Heap::kArrayIteratorProtectorRootIndex);
-  __ Cmp(FieldOperand(rcx, Cell::kValueOffset),
-         Smi::FromInt(Isolate::kProtectorValid));
-  __ j(not_equal, &runtime_call);
-
-  // Check that the map of the initial array iterator hasn't changed.
-  __ movp(rcx,
-          ContextOperand(r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
-  __ movp(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
-  __ cmpp(rcx, ContextOperand(
-                   r14, Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
-  __ j(not_equal, &runtime_call);
-
-  // For FastPacked kinds, iteration will have the same effect as simply
-  // accessing each property in order.
-  Label no_protector_check;
-  __ movzxbp(rcx, FieldOperand(r15, Map::kBitField2Offset));
-  __ DecodeField<Map::ElementsKindBits>(rcx);
-  __ cmpp(rcx, Immediate(FAST_HOLEY_ELEMENTS));
-  __ j(above, &runtime_call);
-  // For non-FastHoley kinds, we can skip the protector check.
-  __ cmpp(rcx, Immediate(FAST_SMI_ELEMENTS));
-  __ j(equal, &no_protector_check);
-  __ cmpp(rcx, Immediate(FAST_ELEMENTS));
-  __ j(equal, &no_protector_check);
-  // Check the ArrayProtector cell.
-  __ LoadRoot(rcx, Heap::kArrayProtectorRootIndex);
-  __ Cmp(FieldOperand(rcx, PropertyCell::kValueOffset),
-         Smi::FromInt(Isolate::kProtectorValid));
-  __ j(not_equal, &runtime_call);
-
-  __ bind(&no_protector_check);
-  // Load the FixedArray backing store.
-  __ movp(rbx, FieldOperand(rbx, JSArray::kElementsOffset));
-  __ jmp(&push_args);
-
-  __ bind(&runtime_call);
-  {
-    // Call the builtin for the result of the spread.
-    FrameScope scope(masm, StackFrame::INTERNAL);
-    __ Push(rdi);  // target
-    __ Push(rdx);  // new target
-    __ Integer32ToSmi(rax, rax);
-    __ Push(rax);  // nargs
-    __ Push(rbx);
-    __ CallRuntime(Runtime::kSpreadIterableFixed);
-    __ movp(rbx, rax);
-    __ Pop(rax);  // nargs
-    __ SmiToInteger32(rax, rax);
-    __ Pop(rdx);  // new target
-    __ Pop(rdi);  // target
-  }
-
-  __ bind(&push_args);
-  {
-    // Pop the return address and spread argument.
-    __ PopReturnAddressTo(r8);
-    __ Pop(rcx);
-
-    // Calculate the new nargs including the result of the spread.
-    __ SmiToInteger32(r9, FieldOperand(rbx, FixedArray::kLengthOffset));
-    // rax += r9 - 1. Subtract 1 for the spread itself.
-    __ leap(rax, Operand(rax, r9, times_1, -1));
-  }
-
-  // Check for stack overflow.
-  {
-    // Check the stack for overflow. We are not trying to catch interruptions
-    // (i.e. debug break and preemption) here, so check the "real stack limit".
-    Label done;
-    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
-    __ movp(rcx, rsp);
-    // Make rcx the space we have left. The stack might already be overflowed
-    // here which will cause rcx to become negative.
-    __ subp(rcx, kScratchRegister);
-    __ sarp(rcx, Immediate(kPointerSizeLog2));
-    // Check if the arguments will overflow the stack.
-    __ cmpp(rcx, r9);
-    __ j(greater, &done, Label::kNear);  // Signed comparison.
-    __ TailCallRuntime(Runtime::kThrowStackOverflow);
-    __ bind(&done);
-  }
-
-  // Put the evaluated spread onto the stack as additional arguments.
-  {
-    __ Set(rcx, 0);
-    Label done, loop;
-    __ bind(&loop);
-    __ cmpl(rcx, r9);
-    __ j(equal, &done, Label::kNear);
-    __ movp(kScratchRegister, FieldOperand(rbx, rcx, times_pointer_size,
-                                           FixedArray::kHeaderSize));
-    __ Push(kScratchRegister);
-    __ incl(rcx);
-    __ jmp(&loop);
-    __ bind(&done);
-    __ PushReturnAddressFrom(r8);
-  }
-  // Dispatch.
+  CheckSpreadAndPushToStack(masm);
   __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
 }
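
Finally, a worked picture of what the shared helper does to the stack for a call like f(a, b, ...xs) with xs == [x0, x1] (a sketch following the register comments above, not generated code):

    // On entry to CheckSpreadAndPushToStack (rax == 3):
    //   rsp[0x00] : return address
    //   rsp[0x08] : xs        <-- the spread is always the last argument
    //   rsp[0x10] : b
    //   rsp[0x18] : a
    //   rsp[0x20] : receiver
    // On exit (rax == 3 + 2 - 1 == 4): the spread slot has been replaced by
    // its unpacked elements, pushed in array order so x1 ends up on top.
    //   rsp[0x00] : return address
    //   rsp[0x08] : x1
    //   rsp[0x10] : x0
    //   rsp[0x18] : b
    //   rsp[0x20] : a
    //   rsp[0x28] : receiver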