Index: src/ia32/code-stubs-ia32.cc
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index d1725bbdb13dc1768b0e089566b184e78d8b5708..ca104593842082c95b394877b6331bb52a40849c 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -2421,6 +2421,176 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
+void CallICStub::GenerateCall(MacroAssembler* masm, bool monomorphic,
Toon Verwaest 2014/03/10 13:50:44: one arg per line
mvstanton 2014/03/20 15:51:53: Done.
+                              bool args_match, bool strict_or_native) {
+  Isolate* isolate = masm->isolate();
+  Label slow, non_function, cont, wrap;
+  ParameterCount actual(arg_count());
+
+  if (!monomorphic) {
+    // Check that the function really is a JavaScript function.
+    __ JumpIfSmi(edi, &non_function);
+
+    // Go to the slow case if we do not have a function.
+    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
+    __ j(not_equal, &slow);
+  }
+
+  if (call_as_method()) {
+    if (!monomorphic) {
+      // Do not transform the receiver for strict mode functions.
+      __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+      __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
+                1 << SharedFunctionInfo::kStrictModeBitWithinByte);
+      __ j(not_equal, &cont);
+
+      // Do not transform the receiver for natives (shared already in ecx).
+      __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
+                1 << SharedFunctionInfo::kNativeBitWithinByte);
+      __ j(not_equal, &cont);
+    }
+
+    if (!strict_or_native) {
+      // Load the receiver from the stack.
+      __ mov(eax, Operand(esp, (arg_count() + 1) * kPointerSize));
+
+      __ JumpIfSmi(eax, &wrap);
+
+      __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
+      __ j(below, &wrap);
+    }
+
+    __ bind(&cont);
+  }
+
+  if (args_match) {
+    __ InvokeFunction(edi, actual, actual, JUMP_FUNCTION, NullCallWrapper());
+  } else {
+    __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
+  }
+
+  if (!monomorphic) {
+    // Slow-case: Non-function called.
+    __ bind(&slow);
+    // Check for function proxy.
+    __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
+    __ j(not_equal, &non_function);
+    __ pop(ecx);
+    __ push(edi);  // Put proxy as an additional argument under the return address.
+    __ push(ecx);
+    __ Set(eax, Immediate(arg_count() + 1));
+    __ Set(ebx, Immediate(0));
+    __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
+    {
+      Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+      __ jmp(adaptor, RelocInfo::CODE_TARGET);
+    }
+
+    // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+    // of the original receiver from the call site).
+    __ bind(&non_function);
+    __ mov(Operand(esp, (arg_count() + 1) * kPointerSize), edi);
+    __ Set(eax, Immediate(arg_count()));
+    __ Set(ebx, Immediate(0));
+    __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
+    Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+    __ jmp(adaptor, RelocInfo::CODE_TARGET);
+  }
+
+  if (call_as_method() && !strict_or_native) {
+    __ bind(&wrap);
+    // Wrap the receiver and patch it back onto the stack.
+    { FrameScope frame_scope(masm, StackFrame::INTERNAL);
+      __ push(edi);
+      __ push(eax);
+      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+      __ pop(edi);
+    }
+    __ mov(Operand(esp, (arg_count() + 1) * kPointerSize), eax);
+    __ jmp(&cont);
+  }
+}
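The receiver handling above (the &wrap path) boils down to the sloppy-mode ToObject conversion. As a rough standalone model, plain C++ rather than V8 macro-assembler code, with illustrative stand-in types:

#include <iostream>
#include <string>

// Stand-ins for the SharedFunctionInfo bits and the JS value model.
struct Callee { bool strict_mode; bool native; };
struct Value  { bool is_spec_object; std::string repr; };

Value ToObject(const Value& v) {  // models Builtins::TO_OBJECT
  return Value{true, "Object(" + v.repr + ")"};
}

// Strict-mode and native callees get the receiver untouched; for sloppy-mode
// callees a primitive receiver is boxed before the call.
Value ReceiverForCall(const Callee& f, const Value& receiver) {
  if (f.strict_mode || f.native) return receiver;
  if (receiver.is_spec_object) return receiver;
  return ToObject(receiver);
}

int main() {
  Callee sloppy{false, false};
  std::cout << ReceiverForCall(sloppy, {false, "42"}).repr << "\n";  // Object(42)
}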
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // edi - function
+  // ebx - vector
+  // edx - slot id
+  Label mono, slow;
+
+  Isolate* isolate = masm->isolate();
+  if (!monomorphic()) {
+    Label miss_uninit;
+
+    // The checks. First, does edi match the function recorded in the cell?
+    __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
+                             FixedArray::kHeaderSize));
+    __ cmp(ecx, edi);
Toon Verwaest 2014/03/10 13:50:44: Wouldn't it be faster to flip over the conditions,
mvstanton 2014/03/20 15:51:53: Excellent idea, it reads cleaner too.
+    __ j(equal, &mono);
+    __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
+    __ j(equal, &slow);
+    __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
+    __ j(equal, &miss_uninit);
+    // If we get here, go from monomorphic to megamorphic. Don't bother
+    // missing, just update the cell.
Toon Verwaest 2014/03/10 13:50:44: we went -> If we get here, go from monomorphic to
mvstanton 2014/03/20 15:51:53: Done.
+    __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
+                        FixedArray::kHeaderSize),
+           Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
+    __ jmp(&slow);
+
+    __ bind(&miss_uninit);
+    GenerateMiss(masm);
+    __ jmp(&slow);
+  } else {
+    // Verify whether the input function matches the recorded target; miss
+    // otherwise.
Toon Verwaest 2014/03/10 13:50:44: Verify whether the input function matches the reco
mvstanton 2014/03/20 15:51:53: After rearranging the function, I kept your ideal
+    __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
+                             FixedArray::kHeaderSize));
+    __ cmp(ecx, edi);
+    __ j(equal, &mono);
Toon Verwaest 2014/03/10 13:50:44: Same here as above. This path should be inlined he
mvstanton 2014/03/20 15:51:53: Done.
+    // We missed: fall back to the slow stub. GenerateMiss patches the cell,
+    // but we still have to handle the slow case here.
+    GenerateMiss(masm);
+    __ jmp(&slow);
+  }
+
+  __ bind(&mono);
+  if (!monomorphic()) {
+    GenerateCall(masm, true, true, false);
+  } else {
+    GenerateCall(masm, true, args_match(), strict_native());
+  }
+
+  // The slow case.
+  __ bind(&slow);
+  GenerateCall(masm, false, false, false);
+}
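Taken together, the checks in Generate() implement a small state machine over the feedback slot: a monomorphic hit takes the fast path, a mismatch degrades the slot to megamorphic, and an uninitialized slot records the callee via the miss handler. A minimal standalone sketch of those transitions, again plain C++ with illustrative names rather than V8 code:

#include <cstdint>

// Stand-ins for the sentinels compared against above.
enum class Feedback : uint8_t { kUninitialized, kMonomorphic, kMegamorphic };

struct Slot {
  Feedback state = Feedback::kUninitialized;
  const void* target = nullptr;  // the recorded function when monomorphic
};

// Returns true when the fast monomorphic path may be taken.
bool DispatchThroughSlot(Slot* slot, const void* callee) {
  if (slot->state == Feedback::kMonomorphic) {
    if (slot->target == callee) return true;  // cmp ecx, edi; j(equal, &mono)
    slot->state = Feedback::kMegamorphic;     // mismatch: mark megamorphic
    return false;                             // continue at the slow case
  }
  if (slot->state == Feedback::kUninitialized) {
    slot->state = Feedback::kMonomorphic;     // what GenerateMiss arranges
    slot->target = callee;
    return false;                             // the first call still goes slow
  }
  return false;                               // megamorphic: stay generic
}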
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ mov(ecx, Operand(esp, (arg_count() + 1) * kPointerSize));
+
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver, the function, and the feedback info.
+    __ push(ecx);
+    __ push(edi);
+    __ push(ebx);
+    __ push(edx);
+
+    // Call the entry.
+    CEntryStub stub(1);
+    __ mov(eax, Immediate(4));
+    __ mov(ebx, Immediate(ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                            masm->isolate())));
+    __ CallStub(&stub);
+
+    // Move result to edi and exit the internal frame.
+    __ mov(edi, eax);
+  }
+}
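Note the miss protocol here: GenerateMiss passes four values (receiver, function, feedback vector, slot id) to IC::kCallIC_Miss through the CEntryStub, and the runtime's return value, the function to actually call, comes back in eax and is moved into edi. That is what lets Generate() fall through to its slow path with the correct callee in place after a miss.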
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // ebx : feedback vector
   // edx : (only if ebx is not the megamorphic symbol) slot in feedback
@@ -2436,10 +2606,6 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     // Goto slow case if we do not have a function.
     __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
     __ j(not_equal, &slow);
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-    }
   }

   // Fast-case: Just invoke the function.
@@ -2479,14 +2645,6 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case. MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
-                          FixedArray::kHeaderSize),
-             Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
-    }
     // Check for function proxy.
     __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
     __ j(not_equal, &non_function);
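Both removals in this function follow from the same move: recording the call target and marking the slot megamorphic are now CallICStub's job, so CallFunctionStub no longer touches the feedback vector on either its fast or slow path.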
@@ -2586,6 +2744,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   // It is important that the store buffer overflow stubs are generated first.
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
+  // CallICStub::GenerateAheadOfTime(isolate);
   if (Serializer::enabled()) {
     PlatformFeatureScope sse2(SSE2);
     BinaryOpICStub::GenerateAheadOfTime(isolate);