OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
6 | 6 |
7 #include "src/bootstrapper.h" | 7 #include "src/bootstrapper.h" |
8 #include "src/code-stubs.h" | 8 #include "src/code-stubs.h" |
9 #include "src/codegen.h" | 9 #include "src/codegen.h" |
10 #include "src/ic/handler-compiler.h" | 10 #include "src/ic/handler-compiler.h" |
(...skipping 2050 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2061 | 2061 |
2062 | 2062 |
2063 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { | 2063 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { |
2064 __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); | 2064 __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
2065 __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset)); | 2065 __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset)); |
2066 __ movp(vector, FieldOperand(vector, | 2066 __ movp(vector, FieldOperand(vector, |
2067 SharedFunctionInfo::kFeedbackVectorOffset)); | 2067 SharedFunctionInfo::kFeedbackVectorOffset)); |
2068 } | 2068 } |
2069 | 2069 |
2070 | 2070 |
2071 void CallIC_ArrayStub::Generate(MacroAssembler* masm) { | 2071 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
2072 // rdi - function | 2072 // rdi - function |
2073 // rdx - slot id (as integer) | 2073 // rdx - slot id |
2074 // rbx - vector | 2074 // rbx - vector |
2075 Label miss; | 2075 // rcx - allocation site (loaded from vector[slot]). |
2076 int argc = arg_count(); | 2076 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r8); |
2077 ParameterCount actual(argc); | 2077 __ cmpp(rdi, r8); |
2078 | 2078 __ j(not_equal, miss); |
2079 __ SmiToInteger32(rdx, rdx); | |
2080 | |
2081 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); | |
2082 __ cmpp(rdi, rcx); | |
2083 __ j(not_equal, &miss); | |
2084 | 2079 |
2085 __ movp(rax, Immediate(arg_count())); | 2080 __ movp(rax, Immediate(arg_count())); |
2086 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, | |
2087 FixedArray::kHeaderSize)); | |
2088 // Verify that rcx contains an AllocationSite | |
2089 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), | |
2090 Heap::kAllocationSiteMapRootIndex); | |
2091 __ j(not_equal, &miss, Label::kNear); | |
2092 | 2081 |
2093 // Increment the call count for monomorphic function calls. | 2082 // Increment the call count for monomorphic function calls. |
2094 { | 2083 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, |
2095 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size, | 2084 FixedArray::kHeaderSize + kPointerSize), |
2096 FixedArray::kHeaderSize + kPointerSize), | 2085 Smi::FromInt(CallICNexus::kCallCountIncrement)); |
2097 Smi::FromInt(CallICNexus::kCallCountIncrement)); | |
2098 | 2086 |
2099 __ movp(rbx, rcx); | 2087 __ movp(rbx, rcx); |
2100 __ movp(rdx, rdi); | 2088 __ movp(rdx, rdi); |
2101 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 2089 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
2102 __ TailCallStub(&stub); | 2090 __ TailCallStub(&stub); |
2103 } | |
2104 | |
2105 __ bind(&miss); | |
2106 GenerateMiss(masm); | |
2107 | |
2108 // The slow case, we need this no matter what to complete a call after a miss. | |
2109 __ Set(rax, arg_count()); | |
2110 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
2111 } | 2091 } |
2112 | 2092 |
2113 | 2093 |
2114 void CallICStub::Generate(MacroAssembler* masm) { | 2094 void CallICStub::Generate(MacroAssembler* masm) { |
2115 // rdi - function | 2095 // rdi - function |
2116 // rdx - slot id | 2096 // rdx - slot id |
2117 // rbx - vector | 2097 // rbx - vector |
2118 Isolate* isolate = masm->isolate(); | 2098 Isolate* isolate = masm->isolate(); |
2119 const int with_types_offset = | 2099 const int with_types_offset = |
2120 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2100 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
(...skipping 56 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2177 | 2157 |
2178 __ bind(&slow); | 2158 __ bind(&slow); |
2179 EmitSlowCase(masm, &args, argc); | 2159 EmitSlowCase(masm, &args, argc); |
2180 | 2160 |
2181 if (CallAsMethod()) { | 2161 if (CallAsMethod()) { |
2182 __ bind(&wrap); | 2162 __ bind(&wrap); |
2183 EmitWrapCase(masm, &args, &cont); | 2163 EmitWrapCase(masm, &args, &cont); |
2184 } | 2164 } |
2185 | 2165 |
2186 __ bind(&extra_checks_or_miss); | 2166 __ bind(&extra_checks_or_miss); |
2187 Label uninitialized, miss; | 2167 Label uninitialized, miss, not_allocation_site; |
2188 | 2168 |
2189 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate)); | 2169 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate)); |
2190 __ j(equal, &slow_start); | 2170 __ j(equal, &slow_start); |
2191 | 2171 |
| 2172 // Check if we have an allocation site. |
| 2173 __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), |
| 2174 Heap::kAllocationSiteMapRootIndex); |
| 2175 __ j(not_equal, ¬_allocation_site); |
| 2176 |
| 2177 // We have an allocation site. |
| 2178 HandleArrayCase(masm, &miss); |
| 2179 |
| 2180 __ bind(¬_allocation_site); |
| 2181 |
2192 // The following cases attempt to handle MISS cases without going to the | 2182 // The following cases attempt to handle MISS cases without going to the |
2193 // runtime. | 2183 // runtime. |
2194 if (FLAG_trace_ic) { | 2184 if (FLAG_trace_ic) { |
2195 __ jmp(&miss); | 2185 __ jmp(&miss); |
2196 } | 2186 } |
2197 | 2187 |
2198 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate)); | 2188 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate)); |
2199 __ j(equal, &uninitialized); | 2189 __ j(equal, &uninitialized); |
2200 | 2190 |
2201 // We are going megamorphic. If the feedback is a JSFunction, it is fine | 2191 // We are going megamorphic. If the feedback is a JSFunction, it is fine |
(...skipping 69 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2271 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2261 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
2272 FrameScope scope(masm, StackFrame::INTERNAL); | 2262 FrameScope scope(masm, StackFrame::INTERNAL); |
2273 | 2263 |
2274 // Push the receiver and the function and feedback info. | 2264 // Push the receiver and the function and feedback info. |
2275 __ Push(rdi); | 2265 __ Push(rdi); |
2276 __ Push(rbx); | 2266 __ Push(rbx); |
2277 __ Integer32ToSmi(rdx, rdx); | 2267 __ Integer32ToSmi(rdx, rdx); |
2278 __ Push(rdx); | 2268 __ Push(rdx); |
2279 | 2269 |
2280 // Call the entry. | 2270 // Call the entry. |
2281 Runtime::FunctionId id = GetICState() == DEFAULT | 2271 __ CallRuntime(Runtime::kCallIC_Miss, 3); |
2282 ? Runtime::kCallIC_Miss | |
2283 : Runtime::kCallIC_Customization_Miss; | |
2284 __ CallRuntime(id, 3); | |
2285 | 2272 |
2286 // Move result to rdi and exit the internal frame. | 2273 // Move result to rdi and exit the internal frame. |
2287 __ movp(rdi, rax); | 2274 __ movp(rdi, rax); |
2288 } | 2275 } |
2289 | 2276 |
2290 | 2277 |
2291 bool CEntryStub::NeedsImmovableCode() { | 2278 bool CEntryStub::NeedsImmovableCode() { |
2292 return false; | 2279 return false; |
2293 } | 2280 } |
2294 | 2281 |
(...skipping 2347 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4642 } | 4629 } |
4643 | 4630 |
4644 | 4631 |
4645 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 4632 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
4646 EmitLoadTypeFeedbackVector(masm, rbx); | 4633 EmitLoadTypeFeedbackVector(masm, rbx); |
4647 CallICStub stub(isolate(), state()); | 4634 CallICStub stub(isolate(), state()); |
4648 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); | 4635 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); |
4649 } | 4636 } |
4650 | 4637 |
4651 | 4638 |
4652 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { | |
4653 EmitLoadTypeFeedbackVector(masm, rbx); | |
4654 CallIC_ArrayStub stub(isolate(), state()); | |
4655 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); | |
4656 } | |
4657 | |
4658 | |
4659 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4639 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4660 if (masm->isolate()->function_entry_hook() != NULL) { | 4640 if (masm->isolate()->function_entry_hook() != NULL) { |
4661 ProfileEntryHookStub stub(masm->isolate()); | 4641 ProfileEntryHookStub stub(masm->isolate()); |
4662 masm->CallStub(&stub); | 4642 masm->CallStub(&stub); |
4663 } | 4643 } |
4664 } | 4644 } |
4665 | 4645 |
4666 | 4646 |
4667 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 4647 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
4668 // This stub can be called from essentially anywhere, so it needs to save | 4648 // This stub can be called from essentially anywhere, so it needs to save |
(...skipping 925 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5594 kStackSpace, nullptr, return_value_operand, NULL); | 5574 kStackSpace, nullptr, return_value_operand, NULL); |
5595 } | 5575 } |
5596 | 5576 |
5597 | 5577 |
5598 #undef __ | 5578 #undef __ |
5599 | 5579 |
5600 } // namespace internal | 5580 } // namespace internal |
5601 } // namespace v8 | 5581 } // namespace v8 |
5602 | 5582 |
5603 #endif // V8_TARGET_ARCH_X64 | 5583 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |