Index: src/builtins/ia32/builtins-ia32.cc
diff --git a/src/builtins/ia32/builtins-ia32.cc b/src/builtins/ia32/builtins-ia32.cc
index bd638672b1efa061881dcccaf2dac027e10983bf..031f8a673d2f9f3963e76c7eff938cb463f01c31 100644
--- a/src/builtins/ia32/builtins-ia32.cc
+++ b/src/builtins/ia32/builtins-ia32.cc
@@ -943,6 +943,57 @@ void Builtins::Generate_InterpreterPushArgsAndConstructArray(
   }
 }
 
+// static
+void Builtins::Generate_InterpreterPushArgsAndConstructWithSpread(
+    MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- eax : the number of arguments (not including the receiver)
+  //  -- edx : the new target
+  //  -- edi : the constructor
+  //  -- ebx : allocation site feedback if available, undefined otherwise
+  //  -- ecx : the address of the first argument to be pushed. Subsequent
+  //           arguments should be consecutive above this, in the same order as
+  //           they are to be pushed onto the stack.
+  // -----------------------------------
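+  // Push the arguments, then tail-call the ConstructWithSpread builtin, which
+  // replaces the spread argument with its expanded elements.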
+  Label stack_overflow;
+  // We need two scratch registers. Push edi and edx onto the stack.
+  __ Push(edi);
+  __ Push(edx);
+
+  // Push the arguments and move the return address to the top of the stack.
+  // The eax register is read-only. The ecx register will be modified. The edx
+  // and edi registers will be modified but restored to their original values.
+  Generate_InterpreterPushArgsAndReturnAddress(masm, eax, ecx, edx, edi, false,
+                                               2, &stack_overflow);
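+  // The saved edi and edx occupy two extra stack slots, which the helper must
+  // account for when relocating the return address (hence the trailing 2).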
+
+  // Restore edi and edx.
+  __ Pop(edx);
+  __ Pop(edi);
+
+  __ AssertUndefinedOrAllocationSite(ebx);
+
+  // Call the constructor with unmodified eax, edi, edx values.
+  __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
+          RelocInfo::CODE_TARGET);
+
+  __ bind(&stack_overflow);
+  {
+    // Pop the temporary registers, so that the return address is on top of
+    // the stack.
+    __ Pop(edx);
+    __ Pop(edi);
+
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+
+    // This should be unreachable.
+    __ int3();
+  }
+}
+
 static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
   // Set the return address to the correct point in the interpreter entry
   // trampoline.
@@ -2795,6 +2846,178 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
 }
 
 // static
+void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- eax : the number of arguments (not including the receiver)
+  //  -- edx : the new target (either the same as the constructor or
+  //           the JSFunction on which new was invoked initially)
+  //  -- edi : the constructor to call (can be any Object)
+  // -----------------------------------
+
+  // Free up some registers.
+  __ movd(xmm0, edx);
+  __ movd(xmm1, edi);
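+  // ia32 is short on general-purpose registers, so the new target and the
+  // constructor are stashed in xmm registers rather than spilled to the stack.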
+
+  Register argc = eax;
+
+  Register scratch = ecx;
+  Register scratch2 = edi;
+
+  Register spread = ebx;
+  Register spread_map = edx;
+
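+  // The spread is the last argument; it sits just above the return address.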
+  __ mov(spread, Operand(esp, kPointerSize));
+  __ mov(spread_map, FieldOperand(spread, HeapObject::kMapOffset));
+
+  Label runtime_call, push_args;
+  // Check that the spread is an array.
+  __ CmpInstanceType(spread_map, JS_ARRAY_TYPE);
+  __ j(not_equal, &runtime_call);
+
+  // Check that we have the original ArrayPrototype.
+  __ mov(scratch, FieldOperand(spread_map, Map::kPrototypeOffset));
+  __ mov(scratch2, NativeContextOperand());
+  __ cmp(scratch,
+         ContextOperand(scratch2, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
+  __ j(not_equal, &runtime_call);
+
+  // Check that the ArrayPrototype hasn't been modified in a way that would
+  // affect iteration.
+  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
+  __ cmp(FieldOperand(scratch, Cell::kValueOffset),
+         Immediate(Smi::FromInt(Isolate::kProtectorValid)));
+  __ j(not_equal, &runtime_call);
+
+  // Check that the map of the initial array iterator hasn't changed.
+  __ mov(scratch2, NativeContextOperand());
+  __ mov(scratch,
+         ContextOperand(scratch2,
+                        Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
+  __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
+  __ cmp(scratch,
+         ContextOperand(scratch2,
+                        Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
+  __ j(not_equal, &runtime_call);
+
+  // For FastPacked kinds, iteration will have the same effect as simply
+  // accessing each property in order.
+  Label no_protector_check;
+  __ mov(scratch, FieldOperand(spread_map, Map::kBitField2Offset));
+  __ DecodeField<Map::ElementsKindBits>(scratch);
+  __ cmp(scratch, Immediate(LAST_FAST_ELEMENTS_KIND));
+  __ j(above, &runtime_call);
+  // For non-FastHoley kinds, we can skip the protector check.
+  __ cmp(scratch, Immediate(FAST_SMI_ELEMENTS));
+  __ j(equal, &no_protector_check);
+  __ cmp(scratch, Immediate(FAST_ELEMENTS));
+  __ j(equal, &no_protector_check);
+  __ cmp(scratch, Immediate(FAST_DOUBLE_ELEMENTS));
+  __ j(equal, &no_protector_check);
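+  // Holey kinds may load elements from the prototype chain, so they are only
+  // safe if the array protector cell is still valid.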
+  // Check the ArrayProtector cell.
+  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
+  __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset),
+         Immediate(Smi::FromInt(Isolate::kProtectorValid)));
+  __ j(not_equal, &runtime_call);
+
+  __ bind(&no_protector_check);
+  // Load the FixedArray backing store.
+  __ mov(spread, FieldOperand(spread, JSArray::kElementsOffset));
+  // The backing store can be used directly; skip the runtime call.
+  __ jmp(&push_args);
+
+  __ bind(&runtime_call);
+  {
+    // Call into the runtime to get the result of the spread.
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    // Restore edi/edx from xmm and save them on the stack across the call.
+    __ movd(edi, xmm1);
+    __ movd(edx, xmm0);
+    __ Push(edi);
+    __ Push(edx);
+    __ SmiTag(argc);
+    __ Push(argc);
+    __ Push(spread);
+    __ CallRuntime(Runtime::kSpreadIterableFixed);
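+    // The runtime returns the spread's contents as a FixedArray in eax.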
+    __ mov(spread, eax);
+    __ Pop(argc);
+    __ SmiUntag(argc);
+    __ Pop(edx);
+    __ Pop(edi);
+    // Stash edx and edi in xmm registers again to free them up.
+    __ movd(xmm0, edx);
+    __ movd(xmm1, edi);
+  }
+
+  Register spread_len = edx;
+  Register return_address = edi;
+  __ bind(&push_args);
+  {
+    // Pop the return address and spread argument.
+    __ PopReturnAddressTo(return_address);
+    __ Pop(scratch);
+
+    // Calculate the new argc, including the result of the spread.
+    __ mov(spread_len, FieldOperand(spread, FixedArray::kLengthOffset));
+    __ SmiUntag(spread_len);
+    // argc += spread_len - 1. Subtract 1 for the spread itself.
+    __ lea(argc, Operand(argc, spread_len, times_1, -1));
+  }
+
+  // Check for stack overflow.
+  {
+    // Check the stack for overflow. We are not trying to catch interruptions
+    // (i.e. debug break and preemption) here, so check the "real stack limit".
+    Label done;
+    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+    // Make scratch the space we have left. The stack might already be
+    // overflowed here, which will cause scratch to become negative.
+    __ neg(scratch);
+    __ add(scratch, esp);
+    __ sar(scratch, kPointerSizeLog2);
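+    // scratch now holds the number of free slots on the stack, in words.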
+    // Check if the arguments will overflow the stack.
+    __ cmp(scratch, spread_len);
+    __ j(greater, &done, Label::kNear);  // Signed comparison.
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    __ bind(&done);
+  }
+
+  // Put the evaluated spread onto the stack as additional arguments.
+  {
+    Register scratch2 = esi;
+    __ movd(xmm2, esi);
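+    // esi holds the context and must be preserved, so park it in xmm2 while
+    // it doubles as an element scratch register.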
+
+    __ mov(scratch, Immediate(0));
+    Label done, loop;
+    __ bind(&loop);
+    __ cmp(scratch, spread_len);
+    __ j(equal, &done, Label::kNear);
+    __ mov(scratch2, FieldOperand(spread, scratch, times_pointer_size,
+                                  FixedArray::kHeaderSize));
+    __ Push(scratch2);
+    __ inc(scratch);
+    __ jmp(&loop);
+    __ bind(&done);
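+    // Restore the return address, context, constructor and new target.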
+    __ PushReturnAddressFrom(return_address);
+    __ movd(esi, xmm2);
+    __ movd(edi, xmm1);
+    __ movd(edx, xmm0);
+  }
+
+  // Dispatch.
+  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+}
+
+// static
 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- edx : requested object size (untagged)