Index: src/arm/code-stubs-arm.cc
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index 44ed3a6fc67845a5e30f81bd231edd79685704ab..7b2935106ff59e2995a90737fe75adf05238bbfb 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -2856,11 +2856,62 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
+static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
+  // Do not transform the receiver for strict mode functions.
+  __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
+  __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset));
+  __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
+                           kSmiTagSize)));
+  __ b(ne, cont);
+
+  // Do not transform the receiver for native (Compilerhints already in r4).
+  __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
+  __ b(ne, cont);
+}
+
+
+static void EmitSlowCase(MacroAssembler* masm,
+                         int argc,
+                         Label* non_function) {
+  // Check for function proxy.
+  __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE));
+  __ b(ne, non_function);
+  __ push(r1);  // put proxy as additional argument
+  __ mov(r0, Operand(argc + 1, RelocInfo::NONE32));
+  __ mov(r2, Operand::Zero());
+  __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
+  {
+    Handle<Code> adaptor =
+        masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+    __ Jump(adaptor, RelocInfo::CODE_TARGET);
+  }
+
+  // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+  // of the original receiver from the call site).
+  __ bind(non_function);
+  __ str(r1, MemOperand(sp, argc * kPointerSize));
+  __ mov(r0, Operand(argc));  // Set up the number of arguments.
+  __ mov(r2, Operand::Zero());
+  __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
+  __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+          RelocInfo::CODE_TARGET);
+}
+
+
+static void EmitWrapCase(MacroAssembler* masm, int argc, Label* cont) {
+  // Wrap the receiver and patch it back onto the stack.
+  { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
+    __ Push(r1, r3);
+    __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+    __ pop(r1);
+  }
+  __ str(r0, MemOperand(sp, argc * kPointerSize));
+  __ jmp(cont);
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // r1 : the function to call
-  // r2 : feedback vector
-  // r3 : (only if r2 is not the megamorphic symbol) slot in feedback
-  //      vector (Smi)
   Label slow, non_function, wrap, cont;
   if (NeedsChecks()) {
@@ -2871,36 +2922,20 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     // Goto slow case if we do not have a function.
     __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
     __ b(ne, &slow);
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-      // Type information was updated. Because we may call Array, which
-      // expects either undefined or an AllocationSite in ebx we need
-      // to set ebx to undefined.
-      __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
-    }
   }
   // Fast-case: Invoke the function now.
   // r1: pushed function
-  ParameterCount actual(argc_);
+  int argc = argc_;
+  ParameterCount actual(argc);
   if (CallAsMethod()) {
     if (NeedsChecks()) {
-      // Do not transform the receiver for strict mode functions.
-      __ ldr(r3, FieldMemOperand(r1, JSFunction::kSharedFunctionInfoOffset));
-      __ ldr(r4, FieldMemOperand(r3, SharedFunctionInfo::kCompilerHintsOffset));
-      __ tst(r4, Operand(1 << (SharedFunctionInfo::kStrictModeFunction +
-                               kSmiTagSize)));
-      __ b(ne, &cont);
-
-      // Do not transform the receiver for native (Compilerhints already in r3).
-      __ tst(r4, Operand(1 << (SharedFunctionInfo::kNative + kSmiTagSize)));
-      __ b(ne, &cont);
+      EmitContinueIfStrictOrNative(masm, &cont);
     }
     // Compute the receiver in sloppy mode.
-    __ ldr(r3, MemOperand(sp, argc_ * kPointerSize));
+    __ ldr(r3, MemOperand(sp, argc * kPointerSize));
     if (NeedsChecks()) {
       __ JumpIfSmi(r3, &wrap);
@@ -2912,55 +2947,18 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     __ bind(&cont);
   }
+
   __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case. MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
-                isolate()->heap()->megamorphic_symbol());
-      __ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
-      __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
-      __ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
-    }
-    // Check for function proxy.
-    __ cmp(r4, Operand(JS_FUNCTION_PROXY_TYPE));
-    __ b(ne, &non_function);
-    __ push(r1);  // put proxy as additional argument
-    __ mov(r0, Operand(argc_ + 1, RelocInfo::NONE32));
-    __ mov(r2, Operand::Zero());
-    __ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
-    {
-      Handle<Code> adaptor =
-          isolate()->builtins()->ArgumentsAdaptorTrampoline();
-      __ Jump(adaptor, RelocInfo::CODE_TARGET);
-    }
-
-    // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
-    // of the original receiver from the call site).
-    __ bind(&non_function);
-    __ str(r1, MemOperand(sp, argc_ * kPointerSize));
-    __ mov(r0, Operand(argc_));  // Set up the number of arguments.
-    __ mov(r2, Operand::Zero());
-    __ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
-    __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
-            RelocInfo::CODE_TARGET);
+    EmitSlowCase(masm, argc, &non_function);
   }
   if (CallAsMethod()) {
     __ bind(&wrap);
-    // Wrap the receiver and patch it back onto the stack.
-    { FrameAndConstantPoolScope frame_scope(masm, StackFrame::INTERNAL);
-      __ Push(r1, r3);
-      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
-      __ pop(r1);
-    }
-    __ str(r0, MemOperand(sp, argc_ * kPointerSize));
-    __ jmp(&cont);
+    EmitWrapCase(masm, argc, &cont);
   }
 }
@@ -3029,6 +3027,109 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
 }
+static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
+  __ ldr(vector, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+  __ ldr(vector, FieldMemOperand(vector,
+                                 JSFunction::kSharedFunctionInfoOffset));
+  __ ldr(vector, FieldMemOperand(vector,
+                                 SharedFunctionInfo::kFeedbackVectorOffset));
+}
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // r1 - function
+  // r3 - slot id (Smi)
+  Label extra_checks_or_miss, slow_start;
+  Label slow, non_function, wrap, cont;
+  Label have_js_function;
+  int argc = state_.arg_count();
+  ParameterCount actual(argc);
+
+  EmitLoadTypeFeedbackVector(masm, r2);
+
+  // The checks. First, does r1 match the recorded monomorphic target?
+  __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
+  __ ldr(r4, FieldMemOperand(r4, FixedArray::kHeaderSize));
+  __ cmp(r1, r4);
+  __ b(ne, &extra_checks_or_miss);
+
+  __ bind(&have_js_function);
+  if (state_.CallAsMethod()) {
+    EmitContinueIfStrictOrNative(masm, &cont);
+    // Compute the receiver in sloppy mode.
+    __ ldr(r3, MemOperand(sp, argc * kPointerSize));
+
+    __ JumpIfSmi(r3, &wrap);
+    __ CompareObjectType(r3, r4, r4, FIRST_SPEC_OBJECT_TYPE);
+    __ b(lt, &wrap);
+
+    __ bind(&cont);
+  }
+
+  __ InvokeFunction(r1, actual, JUMP_FUNCTION, NullCallWrapper());
+
+  __ bind(&slow);
+  EmitSlowCase(masm, argc, &non_function);
+
+  if (state_.CallAsMethod()) {
+    __ bind(&wrap);
+    EmitWrapCase(masm, argc, &cont);
+  }
+
+  __ bind(&extra_checks_or_miss);
+  Label miss;
+
+  __ CompareRoot(r4, Heap::kMegamorphicSymbolRootIndex);
+  __ b(eq, &slow_start);
+  __ CompareRoot(r4, Heap::kUninitializedSymbolRootIndex);
+  __ b(eq, &miss);
+
+  if (!FLAG_trace_ic) {
+    // We are going megamorphic, and we don't want to visit the runtime.
+    __ add(r4, r2, Operand::PointerOffsetFromSmiKey(r3));
+    __ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
+    __ str(ip, FieldMemOperand(r4, FixedArray::kHeaderSize));
+    __ jmp(&slow_start);
+  }
+
+  // We are here because tracing is on or we are going monomorphic.
+  __ bind(&miss);
+  GenerateMiss(masm);
+
+  // the slow case
+  __ bind(&slow_start);
+  // Check that the function is really a JavaScript function.
+  // r1: pushed function (to be verified)
+  __ JumpIfSmi(r1, &non_function);
+
+  // Goto slow case if we do not have a function.
+  __ CompareObjectType(r1, r4, r4, JS_FUNCTION_TYPE);
+  __ b(ne, &slow);
+  __ jmp(&have_js_function);
+}
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ ldr(r4, MemOperand(sp, (state_.arg_count() + 1) * kPointerSize));
+
+  {
+    FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver and the function and feedback info.
+    __ Push(r4, r1, r2, r3);
+
+    // Call the entry.
+    ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                               masm->isolate());
+    __ CallExternalReference(miss, 4);
+
+    // Move result to r1 and exit the internal frame.
+    __ mov(r1, r0);
+  }
+}
+
+
 // StringCharCodeAtGenerator
 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
   Label flat_string;