Index: src/ia32/code-stubs-ia32.cc
diff --git a/src/ia32/code-stubs-ia32.cc b/src/ia32/code-stubs-ia32.cc
index d1725bbdb13dc1768b0e089566b184e78d8b5708..211ebfd0c46e4163ec7a5fdea0b86d6803aada41 100644
--- a/src/ia32/code-stubs-ia32.cc
+++ b/src/ia32/code-stubs-ia32.cc
@@ -2421,6 +2421,178 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
 }
+void CallICStub::GenerateCall(
+    MacroAssembler* masm,
+    CallIC::StubType stub_type,
+    CallIC::ArgumentCheck argument_check,
+    CallIC::FunctionAttributes attributes) {
+  Isolate* isolate = masm->isolate();
+  Label slow, non_function, cont, wrap;
+  ParameterCount actual(arg_count());
+
+  if (stub_type == CallIC::GENERIC) {
+    // Check that the function really is a JavaScript function.
+    __ JumpIfSmi(edi, &non_function);
+
+    // Goto slow case if we do not have a function.
+    __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
+    __ j(not_equal, &slow);
+  }
+
+  if (call_type() == CallIC::METHOD) {
+    if (stub_type == CallIC::GENERIC) {
+      // Do not transform the receiver for strict mode functions.
+      __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+      __ test_b(FieldOperand(ecx, SharedFunctionInfo::kStrictModeByteOffset),
+                1 << SharedFunctionInfo::kStrictModeBitWithinByte);
+      __ j(not_equal, &cont);
+
+      // Do not transform the receiver for natives (shared already in ecx).
+      __ test_b(FieldOperand(ecx, SharedFunctionInfo::kNativeByteOffset),
+                1 << SharedFunctionInfo::kNativeBitWithinByte);
+      __ j(not_equal, &cont);
+    }
+
+    if (attributes == CallIC::NOT_STRICT_OR_NATIVE) {

Toon Verwaest 2014/03/24 10:25:44:
    CallIC::SLOPPY
    Seems like in this case you don't
mvstanton 2014/03/25 15:34:50:
    Good catch, thanks. Done.

+      // Load the receiver from the stack.
+      __ mov(eax, Operand(esp, (arg_count() + 1) * kPointerSize));
+
+      __ JumpIfSmi(eax, &wrap);
+
+      __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
+      __ j(below, &wrap);
+    }
+
+    __ bind(&cont);
+  }
+
+  if (argument_check == CallIC::ARGUMENTS_MATCH) {

Toon Verwaest 2014/03/24 10:25:44:
    ARGUMENTS_MUST_MATCH, to distinguish from the othe
mvstanton 2014/03/25 15:34:50:
    Done.

+    __ InvokeFunction(edi, actual, actual, JUMP_FUNCTION, NullCallWrapper());
+  } else {
+    __ InvokeFunction(edi, actual, JUMP_FUNCTION, NullCallWrapper());
+  }
+
+  if (stub_type == CallIC::GENERIC) {
+    // Slow-case: Non-function called.
+    __ bind(&slow);
+    // Check for function proxy.
+    __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
+    __ j(not_equal, &non_function);
+    __ pop(ecx);
+    __ push(edi);  // put proxy as additional argument under return address
+    __ push(ecx);
+    __ Set(eax, Immediate(arg_count() + 1));
+    __ Set(ebx, Immediate(0));
+    __ GetBuiltinEntry(edx, Builtins::CALL_FUNCTION_PROXY);
+    {
+      Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+      __ jmp(adaptor, RelocInfo::CODE_TARGET);
+    }
+
+    // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
+    // of the original receiver from the call site).
+    __ bind(&non_function);
+    __ mov(Operand(esp, (arg_count() + 1) * kPointerSize), edi);
+    __ Set(eax, Immediate(arg_count()));
+    __ Set(ebx, Immediate(0));
+    __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
+    Handle<Code> adaptor = isolate->builtins()->ArgumentsAdaptorTrampoline();
+    __ jmp(adaptor, RelocInfo::CODE_TARGET);
+  }
+
+  if (call_type() == CallIC::METHOD &&
+      attributes == CallIC::NOT_STRICT_OR_NATIVE) {
+    __ bind(&wrap);
+    // Wrap the receiver and patch it back onto the stack.
+    { FrameScope frame_scope(masm, StackFrame::INTERNAL);
+      __ push(edi);
+      __ push(eax);
+      __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
+      __ pop(edi);
+    }
+    __ mov(Operand(esp, (arg_count() + 1) * kPointerSize), eax);
+    __ jmp(&cont);
+  }
+}
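
As a reading aid, here is a standalone sketch (not part of the patch; the names below are illustrative assumptions, not V8 identifiers) of the receiver rule the METHOD path above implements: strict-mode and native callees see the receiver unchanged, while a sloppy callee has a Smi or other non-spec-object receiver boxed via Builtins::TO_OBJECT.

    // Sketch of the receiver-wrapping decision in GenerateCall's METHOD path.
    #include <cassert>

    enum ReceiverKind { SMI_RECEIVER, PRIMITIVE_RECEIVER, SPEC_OBJECT_RECEIVER };

    // True when the stub would jump to the 'wrap' label above.
    bool NeedsWrapping(bool strict_or_native, ReceiverKind receiver) {
      if (strict_or_native) return false;       // receiver passed through as-is
      return receiver != SPEC_OBJECT_RECEIVER;  // Smis and primitives get boxed
    }

    int main() {
      assert(NeedsWrapping(false, SMI_RECEIVER));           // wrapped via TO_OBJECT
      assert(!NeedsWrapping(true, PRIMITIVE_RECEIVER));      // strict/native: left alone
      assert(!NeedsWrapping(false, SPEC_OBJECT_RECEIVER));   // already an object
      return 0;
    }
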
+
+
+void CallICStub::Generate(MacroAssembler* masm) {
+  // edi - function
+  // ebx - vector
+  // edx - slot id
+  Isolate* isolate = masm->isolate();
+  Label extra_checks_or_miss, slow;
+
+  // The checks. First, does edi match the recorded monomorphic target?
+  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,

Toon Verwaest 2014/03/24 10:25:44:
    __ cmp(edi, Field...
mvstanton 2014/03/25 15:34:50:
    Done.

+                           FixedArray::kHeaderSize));
+  __ cmp(ecx, edi);
+  __ j(not_equal, &extra_checks_or_miss);
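
Not part of the patch: a sketch of the folded comparison the reviewer suggests above, assuming ecx is not otherwise needed before the extra_checks_or_miss path (where the slot value can be reloaded):

    __ cmp(edi, FieldOperand(ebx, edx, times_half_pointer_size,
                             FixedArray::kHeaderSize));
    __ j(not_equal, &extra_checks_or_miss);
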
+
+  if (stub_type() != CallIC::MONOMORPHIC) {

Toon Verwaest 2014/03/24 10:25:44:
    stub_type() == CallIC::DEFAULT_MONOMORPHIC
mvstanton 2014/03/25 15:34:50:
    Done.

+    GenerateCall(masm,
+                 CallIC::MONOMORPHIC,
+                 CallIC::ARGUMENTS_MATCH,

Toon Verwaest 2014/03/24 10:25:44:
    Can we merge this with the non-default case by jus
mvstanton 2014/03/25 15:34:50:
    Yes indeed, thanks!

+                 CallIC::NOT_STRICT_OR_NATIVE);

Toon Verwaest 2014/03/24 10:25:44:
    CallIC::default_strict_or_native() {
    return argu

+  } else {
+    GenerateCall(masm, CallIC::MONOMORPHIC,
+                 argument_check(),
+                 function_attributes());
+  }
+
+  __ bind(&extra_checks_or_miss);
+  if (stub_type() != CallIC::MONOMORPHIC) {
+    Label miss_uninit;
+
+    __ cmp(ecx, Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
+    __ j(equal, &slow);
+    __ cmp(ecx, Immediate(TypeFeedbackInfo::UninitializedSentinel(isolate)));
+    __ j(equal, &miss_uninit);
+    // If we get here, go from monomorphic to megamorphic. Don't bother
+    // missing; just update the slot.
+    __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
+                        FixedArray::kHeaderSize),
+           Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
+    __ jmp(&slow);
+
+    __ bind(&miss_uninit);
+  }
+
+  GenerateMiss(masm);
+
+  // The slow case.
+  __ bind(&slow);
+  GenerateCall(masm, CallIC::GENERIC, CallIC::ARGUMENTS_DONT_MATCH,
+               CallIC::NOT_STRICT_OR_NATIVE);
+}
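
As a reading aid, here is a standalone sketch (not part of the patch; the enum and function names are illustrative assumptions, not V8 identifiers) of the feedback-slot transitions the stub above implements: an uninitialized slot is filled in by the miss handler, a monomorphic slot that no longer matches the callee is overwritten with the megamorphic sentinel, and a megamorphic slot stays on the generic path.

    // Sketch of the feedback-slot state machine in CallICStub::Generate.
    #include <cassert>

    enum SlotState { UNINITIALIZED, MONOMORPHIC, MEGAMORPHIC };

    // 'target_matches' corresponds to the edi vs. feedback-slot comparison above.
    SlotState NextState(SlotState state, bool target_matches) {
      switch (state) {
        case UNINITIALIZED:
          return MONOMORPHIC;  // GenerateMiss lets the runtime record the target
        case MONOMORPHIC:
          // On a mismatch the stub writes the MegamorphicSentinel and goes generic.
          return target_matches ? MONOMORPHIC : MEGAMORPHIC;
        case MEGAMORPHIC:
        default:
          return MEGAMORPHIC;  // stays generic
      }
    }

    int main() {
      assert(NextState(UNINITIALIZED, false) == MONOMORPHIC);
      assert(NextState(MONOMORPHIC, true) == MONOMORPHIC);
      assert(NextState(MONOMORPHIC, false) == MEGAMORPHIC);
      assert(NextState(MEGAMORPHIC, false) == MEGAMORPHIC);
      return 0;
    }
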
+
+
+void CallICStub::GenerateMiss(MacroAssembler* masm) {
+  // Get the receiver of the function from the stack; 1 ~ return address.
+  __ mov(ecx, Operand(esp, (arg_count() + 1) * kPointerSize));
+
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+
+    // Push the receiver and the function and feedback info.
+    __ push(ecx);
+    __ push(edi);
+    __ push(ebx);
+    __ push(edx);
+
+    // Call the entry.
+    CEntryStub stub(1);
+    __ mov(eax, Immediate(4));
+    __ mov(ebx, Immediate(ExternalReference(IC_Utility(IC::kCallIC_Miss),
+                                            masm->isolate())));
+    __ CallStub(&stub);
+
+    // Move result to edi and exit the internal frame.
+    __ mov(edi, eax);
+  }
+}
+
+
 void CallFunctionStub::Generate(MacroAssembler* masm) {
   // ebx : feedback vector
   // edx : (only if ebx is not the megamorphic symbol) slot in feedback
@@ -2436,10 +2608,6 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
     // Goto slow case if we do not have a function.
     __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
     __ j(not_equal, &slow);
-
-    if (RecordCallTarget()) {
-      GenerateRecordCallTarget(masm);
-    }
   }
   // Fast-case: Just invoke the function.
@@ -2479,14 +2647,6 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
   if (NeedsChecks()) {
     // Slow-case: Non-function called.
     __ bind(&slow);
-    if (RecordCallTarget()) {
-      // If there is a call target cache, mark it megamorphic in the
-      // non-function case. MegamorphicSentinel is an immortal immovable
-      // object (megamorphic symbol) so no write barrier is needed.
-      __ mov(FieldOperand(ebx, edx, times_half_pointer_size,
-                          FixedArray::kHeaderSize),
-             Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate)));
-    }
     // Check for function proxy.
     __ CmpInstanceType(ecx, JS_FUNCTION_PROXY_TYPE);
     __ j(not_equal, &non_function);
@@ -2586,6 +2746,7 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
   // It is important that the store buffer overflow stubs are generated first.
   ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
   CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
+  // CallICStub::GenerateAheadOfTime(isolate);
   if (Serializer::enabled()) {
     PlatformFeatureScope sse2(SSE2);
     BinaryOpICStub::GenerateAheadOfTime(isolate);