Index: src/builtins/mips/builtins-mips.cc
diff --git a/src/builtins/mips/builtins-mips.cc b/src/builtins/mips/builtins-mips.cc
index 946c8bbdb478c085220d745e2a8ad57b8c7beb93..17e17034bb8c21c29e0122dc628d3c38ad547c9e 100644
--- a/src/builtins/mips/builtins-mips.cc
+++ b/src/builtins/mips/builtins-mips.cc
@@ -1196,7 +1196,7 @@ void Builtins::Generate_InterpreterPushArgsAndCallImpl(
 
 // static
 void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
-    MacroAssembler* masm, CallableType construct_type) {
+    MacroAssembler* masm, PushArgsConstructMode mode) {
   // ----------- S t a t e -------------
   //  -- a0 : argument count (not including receiver)
   //  -- a3 : new target
@@ -1213,7 +1213,7 @@ void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
   Generate_InterpreterPushArgs(masm, a0, t4, t1, t0, &stack_overflow);
 
   __ AssertUndefinedOrAllocationSite(a2, t0);
-  if (construct_type == CallableType::kJSFunction) {
+  if (mode == PushArgsConstructMode::kJSFunction) {
     __ AssertFunction(a1);
 
     // Tail call to the function-specific construct stub (still in the caller
@@ -1222,8 +1222,12 @@ void Builtins::Generate_InterpreterPushArgsAndConstructImpl(
     __ lw(t0, FieldMemOperand(t0, SharedFunctionInfo::kConstructStubOffset));
     __ Addu(at, t0, Operand(Code::kHeaderSize - kHeapObjectTag));
     __ Jump(at);
+  } else if (mode == PushArgsConstructMode::kWithFinalSpread) {
+    // Call the constructor with a0, a1, and a3 unmodified.
+    __ Jump(masm->isolate()->builtins()->ConstructWithSpread(),
+            RelocInfo::CODE_TARGET);
   } else {
-    DCHECK_EQ(construct_type, CallableType::kAny);
+    DCHECK_EQ(PushArgsConstructMode::kOther, mode);
     // Call the constructor with a0, a1, and a3 unmodified.
     __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
   }
@@ -2795,6 +2799,139 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
 }
 
 // static
+void Builtins::Generate_ConstructWithSpread(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  //  -- a0 : the number of arguments (not including the receiver)
+  //  -- a1 : the constructor to call (can be any Object)
+  //  -- a3 : the new target (either the same as the constructor or
+  //          the JSFunction on which new was invoked initially)
+  // -----------------------------------
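+
+  // The spread is expanded in one of two ways: a fast path that reads the
+  // backing store of a plain, unmodified JSArray directly, or a runtime call
+  // that produces a FixedArray for any other iterable. Either way the
+  // elements end up pushed as ordinary arguments, so e.g. new C(...[1, 2])
+  // is dispatched as a construct call with the two arguments 1 and 2.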
+
+  Register argc = a0;
+  Register constructor = a1;
+  Register new_target = a3;
+
+  Register scratch = t0;
+  Register scratch2 = t1;
+
+  Register spread = a2;
+  Register spread_map = t3;
+
+  Register native_context = t4;
+
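+  // The spread is the last argument, so it is on top of the stack.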
+  __ lw(spread, MemOperand(sp, 0));
+  __ lw(spread_map, FieldMemOperand(spread, HeapObject::kMapOffset));
+  __ lw(native_context, NativeContextMemOperand());
+
+  Label runtime_call, push_args;
+  // Check that the spread is an array.
+  __ lbu(scratch, FieldMemOperand(spread_map, Map::kInstanceTypeOffset));
+  __ Branch(&runtime_call, ne, scratch, Operand(JS_ARRAY_TYPE));
+
+  // Check that we have the original ArrayPrototype.
+  __ lw(scratch, FieldMemOperand(spread_map, Map::kPrototypeOffset));
+  __ lw(scratch2, ContextMemOperand(native_context,
+                                    Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
+  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
+
+  // Check that the ArrayPrototype hasn't been modified in a way that would
+  // affect iteration.
+  __ LoadRoot(scratch, Heap::kArrayIteratorProtectorRootIndex);
+  __ lw(scratch, FieldMemOperand(scratch, Cell::kValueOffset));
+  __ Branch(&runtime_call, ne, scratch,
+            Operand(Smi::FromInt(Isolate::kProtectorValid)));
+
+  // Check that the map of the initial array iterator hasn't changed.
+  __ lw(scratch,
+        ContextMemOperand(native_context,
+                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_INDEX));
+  __ lw(scratch, FieldMemOperand(scratch, HeapObject::kMapOffset));
+  __ lw(scratch2,
+        ContextMemOperand(native_context,
+                          Context::INITIAL_ARRAY_ITERATOR_PROTOTYPE_MAP_INDEX));
+  __ Branch(&runtime_call, ne, scratch, Operand(scratch2));
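+  // If that map has changed, the iterator prototype (e.g. its next method)
+  // may have been patched, so take the slow path.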
+
+  // For FastPacked kinds, iteration will have the same effect as simply
+  // accessing each property in order.
+  Label no_protector_check;
+  __ lbu(scratch, FieldMemOperand(spread_map, Map::kBitField2Offset));
+  __ DecodeField<Map::ElementsKindBits>(scratch);
+  __ Branch(&runtime_call, hi, scratch, Operand(LAST_FAST_ELEMENTS_KIND));
+  // For non-FastHoley kinds, we can skip the protector check.
+  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_SMI_ELEMENTS));
+  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_ELEMENTS));
+  __ Branch(&no_protector_check, eq, scratch, Operand(FAST_DOUBLE_ELEMENTS));
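+  // Only holey kinds reach this point: a hole would normally be looked up on
+  // the prototype chain during iteration, so reading the backing store
+  // directly is only safe while the array protector is still valid.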
+  // Check the ArrayProtector cell.
+  __ LoadRoot(scratch, Heap::kArrayProtectorRootIndex);
+  __ lw(scratch, FieldMemOperand(scratch, PropertyCell::kValueOffset));
+  __ Branch(&runtime_call, ne, scratch,
+            Operand(Smi::FromInt(Isolate::kProtectorValid)));
+
+  __ bind(&no_protector_check);
+  // Load the FixedArray backing store.
+  __ lw(spread, FieldMemOperand(spread, JSArray::kElementsOffset));
+  __ Branch(&push_args);
+
+  __ bind(&runtime_call);
+  {
+    // Call into the runtime to expand the spread into a FixedArray.
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    __ SmiTag(argc);
+    __ Push(constructor, new_target, argc, spread);
+    __ CallRuntime(Runtime::kSpreadIterableFixed);
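+    // The expanded elements come back as a FixedArray in v0.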
+    __ mov(spread, v0);
+    __ Pop(constructor, new_target, argc);
+    __ SmiUntag(argc);
+  }
+
+  Register spread_len = t3;
+  __ bind(&push_args);
+  {
+    // Pop the spread argument off the stack.
+    __ Pop(scratch);
+    // Calculate the new argument count, including the result of the spread.
+    __ lw(spread_len, FieldMemOperand(spread, FixedArray::kLengthOffset));
+    __ SmiUntag(spread_len);
+    // argc += spread_len - 1. Subtract 1 for the spread itself.
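+    // E.g. for new C(1, 2, ...[3, 4]) the interpreter pushed three arguments
+    // (1, 2 and the spread array) and spread_len is 2, so the updated argc
+    // is 3 + 2 - 1 = 4.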
+    __ Addu(argc, argc, spread_len);
+    __ Subu(argc, argc, Operand(1));
+  }
+
+  // Check for stack overflow.
+  {
+    // Check the stack for overflow. We are not trying to catch interruptions
+    // (i.e. debug break and preemption) here, so check the "real stack limit".
+    Label done;
+    __ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
+    // Make scratch the space we have left. The stack might already be
+    // overflowed here which will cause scratch to become negative.
+    __ Subu(scratch, sp, scratch);
+    // Check if the arguments will overflow the stack.
+    __ sll(at, spread_len, kPointerSizeLog2);
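+    // at now holds the number of bytes the spread elements will occupy.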
+    __ Branch(&done, gt, scratch, Operand(at));  // Signed comparison.
+    __ TailCallRuntime(Runtime::kThrowStackOverflow);
+    __ bind(&done);
+  }
+
+  // Put the evaluated spread onto the stack as additional arguments.
+  {
+    __ mov(scratch, zero_reg);
+    Label done, loop;
+    __ bind(&loop);
+    __ Branch(&done, eq, scratch, Operand(spread_len));
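+    // Load the current element: scratch is the element index and the
+    // FixedArray data starts at FixedArray::kHeaderSize.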
+    __ Lsa(scratch2, spread, scratch, kPointerSizeLog2);
+    __ lw(scratch2, FieldMemOperand(scratch2, FixedArray::kHeaderSize));
+    __ Push(scratch2);
+    __ Addu(scratch, scratch, Operand(1));
+    __ Branch(&loop);
+    __ bind(&done);
+  }
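+
+  // The stack now holds the leading arguments followed by the unpacked
+  // spread elements, and argc has been updated to match.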
+
+  // Dispatch.
+  __ Jump(masm->isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
+}
+
+// static
 void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- a0 : requested object size (untagged)