Index: src/mips/code-stubs-mips.cc |
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc |
index 0d00f7bbb21e128933833045c548b6980174daa4..6d06bd923843944ba7658a7e2d42a6cdc350d042 100644 |
--- a/src/mips/code-stubs-mips.cc |
+++ b/src/mips/code-stubs-mips.cc |
@@ -3093,18 +3093,20 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { |
// function without changing the state. |
__ Branch(&done, eq, t0, Operand(a1)); |
- // If we came here, we need to see if we are the array function. |
- // If we didn't have a matching function, and we didn't find the megamorph |
- // sentinel, then we have in the slot either some other function or an |
- // AllocationSite. Do a map check on the object in a3. |
- __ lw(t1, FieldMemOperand(t0, 0)); |
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
- __ Branch(&miss, ne, t1, Operand(at)); |
- |
- // Make sure the function is the Array() function |
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
- __ Branch(&megamorphic, ne, a1, Operand(t0)); |
- __ jmp(&done); |
+ if (!FLAG_pretenuring_call_new) { |
+ // If we came here, we need to see if we are the array function. |
+  // If we didn't have a matching function, and we didn't find the megamorphic |
+ // sentinel, then we have in the slot either some other function or an |
+ // AllocationSite. Do a map check on the object in a3. |
+ __ lw(t1, FieldMemOperand(t0, 0)); |
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
+ __ Branch(&miss, ne, t1, Operand(at)); |
+ |
+ // Make sure the function is the Array() function |
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
+ __ Branch(&megamorphic, ne, a1, Operand(t0)); |
+ __ jmp(&done); |
+ } |
__ bind(&miss); |
@@ -3121,36 +3123,38 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) { |
__ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize)); |
__ jmp(&done); |
- // An uninitialized cache is patched with the function or sentinel to |
- // indicate the ElementsKind if function is the Array constructor. |
+ // An uninitialized cache is patched with the function. |
__ bind(&initialize); |
- // Make sure the function is the Array() function |
- __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
- __ Branch(¬_array_function, ne, a1, Operand(t0)); |
+ if (!FLAG_pretenuring_call_new) { |
+ // Make sure the function is the Array() function. |
+ __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, t0); |
+ __ Branch(¬_array_function, ne, a1, Operand(t0)); |
+ |
+      // The target function is the Array constructor. |
+ // Create an AllocationSite if we don't already have it, store it in the |
+ // slot. |
+ { |
+ FrameScope scope(masm, StackFrame::INTERNAL); |
+ const RegList kSavedRegs = |
+ 1 << 4 | // a0 |
+ 1 << 5 | // a1 |
+ 1 << 6 | // a2 |
+ 1 << 7; // a3 |
- // The target function is the Array constructor. |
- // Create an AllocationSite if we don't already have it, store it in the slot. |
- { |
- FrameScope scope(masm, StackFrame::INTERNAL); |
- const RegList kSavedRegs = |
- 1 << 4 | // a0 |
- 1 << 5 | // a1 |
- 1 << 6 | // a2 |
- 1 << 7; // a3 |
+ // Arguments register must be smi-tagged to call out. |
+ __ SmiTag(a0); |
+ __ MultiPush(kSavedRegs); |
- // Arguments register must be smi-tagged to call out. |
- __ SmiTag(a0); |
- __ MultiPush(kSavedRegs); |
+ CreateAllocationSiteStub create_stub; |
+ __ CallStub(&create_stub); |
- CreateAllocationSiteStub create_stub; |
- __ CallStub(&create_stub); |
+ __ MultiPop(kSavedRegs); |
+ __ SmiUntag(a0); |
+ } |
+ __ Branch(&done); |
- __ MultiPop(kSavedRegs); |
- __ SmiUntag(a0); |
+ __ bind(¬_array_function); |
} |
- __ Branch(&done); |
- |
- __ bind(¬_array_function); |
__ sll(t0, a3, kPointerSizeLog2 - kSmiTagSize); |
__ Addu(t0, a2, Operand(t0)); |
@@ -3288,18 +3292,26 @@ void CallConstructStub::Generate(MacroAssembler* masm) { |
__ Branch(&slow, ne, t0, Operand(JS_FUNCTION_TYPE)); |
if (RecordCallTarget()) { |
- Label feedback_register_initialized; |
GenerateRecordCallTarget(masm); |
- // Put the AllocationSite from the feedback vector into a2, or undefined. |
__ sll(at, a3, kPointerSizeLog2 - kSmiTagSize); |
__ Addu(t1, a2, at); |
- __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize)); |
- __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset)); |
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
- __ Branch(&feedback_register_initialized, eq, t1, Operand(at)); |
- __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
- __ bind(&feedback_register_initialized); |
+ if (FLAG_pretenuring_call_new) { |
+ // Put the AllocationSite from the feedback vector into a2. |
+ // By adding kPointerSize we encode that we know the AllocationSite |
+ // entry is at the feedback vector slot given by a3 + 1. |
+ __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize + kPointerSize)); |
+ } else { |
+ Label feedback_register_initialized; |
+ // Put the AllocationSite from the feedback vector into a2, or undefined. |
+ __ lw(a2, FieldMemOperand(t1, FixedArray::kHeaderSize)); |
+ __ lw(t1, FieldMemOperand(a2, AllocationSite::kMapOffset)); |
+ __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex); |
+ __ Branch(&feedback_register_initialized, eq, t1, Operand(at)); |
+ __ LoadRoot(a2, Heap::kUndefinedValueRootIndex); |
+ __ bind(&feedback_register_initialized); |
+ } |
+ |
__ AssertUndefinedOrAllocationSite(a2, t1); |
} |