OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 2191 matching lines...) |
2202 | 2202 |
2203 | 2203 |
2204 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { | 2204 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { |
2205 __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); | 2205 __ mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset)); |
2206 __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset)); | 2206 __ mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset)); |
2207 __ mov(vector, FieldOperand(vector, | 2207 __ mov(vector, FieldOperand(vector, |
2208 SharedFunctionInfo::kFeedbackVectorOffset)); | 2208 SharedFunctionInfo::kFeedbackVectorOffset)); |
2209 } | 2209 } |
2210 | 2210 |
2211 | 2211 |
2212 void CallIC_ArrayStub::Generate(MacroAssembler* masm) { | 2212 void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) { |
2213 // edi - function | 2213 // edi - function |
2214 // edx - slot id | 2214 // edx - slot id |
2215 // ebx - vector | 2215 // ebx - vector |
2216 Label miss; | |
2217 int argc = arg_count(); | |
2218 ParameterCount actual(argc); | |
2219 | |
2220 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); | 2216 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx); |
2221 __ cmp(edi, ecx); | 2217 __ cmp(edi, ecx); |
2222 __ j(not_equal, &miss); | 2218 __ j(not_equal, miss); |
2223 | 2219 |
2224 __ mov(eax, arg_count()); | 2220 __ mov(eax, arg_count()); |
| 2221 // Reload ecx. |
2225 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, | 2222 __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size, |
2226 FixedArray::kHeaderSize)); | 2223 FixedArray::kHeaderSize)); |
2227 | 2224 |
2228 // Verify that ecx contains an AllocationSite | |
2229 Factory* factory = masm->isolate()->factory(); | |
2230 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset), | |
2231 factory->allocation_site_map()); | |
2232 __ j(not_equal, &miss); | |
2233 | |
2234 // Increment the call count for monomorphic function calls. | 2225 // Increment the call count for monomorphic function calls. |
2235 __ add(FieldOperand(ebx, edx, times_half_pointer_size, | 2226 __ add(FieldOperand(ebx, edx, times_half_pointer_size, |
2236 FixedArray::kHeaderSize + kPointerSize), | 2227 FixedArray::kHeaderSize + kPointerSize), |
2237 Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement))); | 2228 Immediate(Smi::FromInt(CallICNexus::kCallCountIncrement))); |
2238 | 2229 |
2239 __ mov(ebx, ecx); | 2230 __ mov(ebx, ecx); |
2240 __ mov(edx, edi); | 2231 __ mov(edx, edi); |
2241 ArrayConstructorStub stub(masm->isolate(), arg_count()); | 2232 ArrayConstructorStub stub(masm->isolate(), arg_count()); |
2242 __ TailCallStub(&stub); | 2233 __ TailCallStub(&stub); |
2243 | 2234 |
2244 __ bind(&miss); | 2235 // Unreachable. |
2245 GenerateMiss(masm); | |
2246 | |
2247 // The slow case, we need this no matter what to complete a call after a miss. | |
2248 __ Set(eax, arg_count()); | |
2249 __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET); | |
2250 } | 2236 } |
2251 | 2237 |
2252 | 2238 |
2253 void CallICStub::Generate(MacroAssembler* masm) { | 2239 void CallICStub::Generate(MacroAssembler* masm) { |
2254 // edi - function | 2240 // edi - function |
2255 // edx - slot id | 2241 // edx - slot id |
2256 // ebx - vector | 2242 // ebx - vector |
2257 Isolate* isolate = masm->isolate(); | 2243 Isolate* isolate = masm->isolate(); |
2258 const int with_types_offset = | 2244 const int with_types_offset = |
2259 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2245 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
(...skipping 54 matching lines...) |
2314 | 2300 |
2315 __ bind(&slow); | 2301 __ bind(&slow); |
2316 EmitSlowCase(isolate, masm, argc); | 2302 EmitSlowCase(isolate, masm, argc); |
2317 | 2303 |
2318 if (CallAsMethod()) { | 2304 if (CallAsMethod()) { |
2319 __ bind(&wrap); | 2305 __ bind(&wrap); |
2320 EmitWrapCase(masm, argc, &cont); | 2306 EmitWrapCase(masm, argc, &cont); |
2321 } | 2307 } |
2322 | 2308 |
2323 __ bind(&extra_checks_or_miss); | 2309 __ bind(&extra_checks_or_miss); |
2324 Label uninitialized, miss; | 2310 Label uninitialized, miss, not_allocation_site; |
2325 | 2311 |
2326 __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate))); | 2312 __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate))); |
2327 __ j(equal, &slow_start); | 2313 __ j(equal, &slow_start); |
2328 | 2314 |
| 2315 // Check if we have an allocation site. |
| 2316 __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset), |
| 2317 Heap::kAllocationSiteMapRootIndex); |
| 2318 __ j(not_equal, ¬_allocation_site); |
| 2319 |
| 2320 // We have an allocation site. |
| 2321 HandleArrayCase(masm, &miss); |
| 2322 |
| 2323 __ bind(¬_allocation_site); |
| 2324 |
2329 // The following cases attempt to handle MISS cases without going to the | 2325 // The following cases attempt to handle MISS cases without going to the |
2330 // runtime. | 2326 // runtime. |
2331 if (FLAG_trace_ic) { | 2327 if (FLAG_trace_ic) { |
2332 __ jmp(&miss); | 2328 __ jmp(&miss); |
2333 } | 2329 } |
2334 | 2330 |
2335 __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate))); | 2331 __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate))); |
2336 __ j(equal, &uninitialized); | 2332 __ j(equal, &uninitialized); |
2337 | 2333 |
2338 // We are going megamorphic. If the feedback is a JSFunction, it is fine | 2334 // We are going megamorphic. If the feedback is a JSFunction, it is fine |
(...skipping 69 matching lines...) |
2408 | 2404 |
2409 void CallICStub::GenerateMiss(MacroAssembler* masm) { | 2405 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
2410 FrameScope scope(masm, StackFrame::INTERNAL); | 2406 FrameScope scope(masm, StackFrame::INTERNAL); |
2411 | 2407 |
2412 // Push the function and feedback info. | 2408 // Push the function and feedback info. |
2413 __ push(edi); | 2409 __ push(edi); |
2414 __ push(ebx); | 2410 __ push(ebx); |
2415 __ push(edx); | 2411 __ push(edx); |
2416 | 2412 |
2417 // Call the entry. | 2413 // Call the entry. |
2418 Runtime::FunctionId id = GetICState() == DEFAULT | 2414 __ CallRuntime(Runtime::kCallIC_Miss, 3); |
2419 ? Runtime::kCallIC_Miss | |
2420 : Runtime::kCallIC_Customization_Miss; | |
2421 __ CallRuntime(id, 3); | |
2422 | 2415 |
2423 // Move result to edi and exit the internal frame. | 2416 // Move result to edi and exit the internal frame. |
2424 __ mov(edi, eax); | 2417 __ mov(edi, eax); |
2425 } | 2418 } |
2426 | 2419 |
2427 | 2420 |
2428 bool CEntryStub::NeedsImmovableCode() { | 2421 bool CEntryStub::NeedsImmovableCode() { |
2429 return false; | 2422 return false; |
2430 } | 2423 } |
2431 | 2424 |
(...skipping 2453 matching lines...) |
4885 } | 4878 } |
4886 | 4879 |
4887 | 4880 |
4888 void CallICTrampolineStub::Generate(MacroAssembler* masm) { | 4881 void CallICTrampolineStub::Generate(MacroAssembler* masm) { |
4889 EmitLoadTypeFeedbackVector(masm, ebx); | 4882 EmitLoadTypeFeedbackVector(masm, ebx); |
4890 CallICStub stub(isolate(), state()); | 4883 CallICStub stub(isolate(), state()); |
4891 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); | 4884 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); |
4892 } | 4885 } |
4893 | 4886 |
4894 | 4887 |
4895 void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) { | |
4896 EmitLoadTypeFeedbackVector(masm, ebx); | |
4897 CallIC_ArrayStub stub(isolate(), state()); | |
4898 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET); | |
4899 } | |
4900 | |
4901 | |
4902 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4888 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4903 if (masm->isolate()->function_entry_hook() != NULL) { | 4889 if (masm->isolate()->function_entry_hook() != NULL) { |
4904 ProfileEntryHookStub stub(masm->isolate()); | 4890 ProfileEntryHookStub stub(masm->isolate()); |
4905 masm->CallStub(&stub); | 4891 masm->CallStub(&stub); |
4906 } | 4892 } |
4907 } | 4893 } |
4908 | 4894 |
4909 | 4895 |
4910 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 4896 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
4911 // Save volatile registers. | 4897 // Save volatile registers. |
(...skipping 902 matching lines...) |
5814 Operand(ebp, 7 * kPointerSize), NULL); | 5800 Operand(ebp, 7 * kPointerSize), NULL); |
5815 } | 5801 } |
5816 | 5802 |
5817 | 5803 |
5818 #undef __ | 5804 #undef __ |
5819 | 5805 |
5820 } // namespace internal | 5806 } // namespace internal |
5821 } // namespace v8 | 5807 } // namespace v8 |
5822 | 5808 |
5823 #endif // V8_TARGET_ARCH_IA32 | 5809 #endif // V8_TARGET_ARCH_IA32 |