OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "v8.h" | 5 #include "v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "bootstrapper.h" | 9 #include "bootstrapper.h" |
10 #include "code-stubs.h" | 10 #include "code-stubs.h" |
(...skipping 2123 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2134 __ Pop(rbx); | 2134 __ Pop(rbx); |
2135 __ Pop(rdi); | 2135 __ Pop(rdi); |
2136 | 2136 |
2137 __ bind(&done); | 2137 __ bind(&done); |
2138 __ Integer32ToSmi(rdx, rdx); | 2138 __ Integer32ToSmi(rdx, rdx); |
2139 | 2139 |
2140 __ bind(&done_no_smi_convert); | 2140 __ bind(&done_no_smi_convert); |
2141 } | 2141 } |
2142 | 2142 |
2143 | 2143 |
| 2144 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) { |
| 2145 // Do not transform the receiver for strict mode functions. |
| 2146 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 2147 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset), |
| 2148 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); |
| 2149 __ j(not_equal, cont); |
| 2150 |
| 2151 // Do not transform the receiver for natives. |
| 2152 // SharedFunctionInfo is already loaded into rcx. |
| 2153 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), |
| 2154 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); |
| 2155 __ j(not_equal, cont); |
| 2156 } |
| 2157 |
| 2158 |
| 2159 static void EmitSlowCase(Isolate* isolate, |
| 2160 MacroAssembler* masm, |
| 2161 StackArgumentsAccessor* args, |
| 2162 int argc, |
| 2163 Label* non_function) { |
| 2164 // Check for function proxy. |
| 2165 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); |
| 2166 __ j(not_equal, non_function); |
| 2167 __ PopReturnAddressTo(rcx); |
| 2168 __ Push(rdi); // put proxy as additional argument under return address |
| 2169 __ PushReturnAddressFrom(rcx); |
| 2170 __ Set(rax, argc + 1); |
| 2171 __ Set(rbx, 0); |
| 2172 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); |
| 2173 { |
| 2174 Handle<Code> adaptor = |
| 2175 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(); |
| 2176 __ jmp(adaptor, RelocInfo::CODE_TARGET); |
| 2177 } |
| 2178 |
| 2179 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead |
| 2180 // of the original receiver from the call site). |
| 2181 __ bind(non_function); |
| 2182 __ movp(args->GetReceiverOperand(), rdi); |
| 2183 __ Set(rax, argc); |
| 2184 __ Set(rbx, 0); |
| 2185 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); |
| 2186 Handle<Code> adaptor = |
| 2187 isolate->builtins()->ArgumentsAdaptorTrampoline(); |
| 2188 __ Jump(adaptor, RelocInfo::CODE_TARGET); |
| 2189 } |
| 2190 |
| 2191 |
| 2192 static void EmitWrapCase(MacroAssembler* masm, |
| 2193 StackArgumentsAccessor* args, |
| 2194 Label* cont) { |
| 2195 // Wrap the receiver and patch it back onto the stack. |
| 2196 { FrameScope frame_scope(masm, StackFrame::INTERNAL); |
| 2197 __ Push(rdi); |
| 2198 __ Push(rax); |
| 2199 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); |
| 2200 __ Pop(rdi); |
| 2201 } |
| 2202 __ movp(args->GetReceiverOperand(), rax); |
| 2203 __ jmp(cont); |
| 2204 } |
| 2205 |
| 2206 |
2144 void CallFunctionStub::Generate(MacroAssembler* masm) { | 2207 void CallFunctionStub::Generate(MacroAssembler* masm) { |
2145 // rbx : feedback vector | |
2146 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback | |
2147 // vector (Smi) | |
2148 // rdi : the function to call | 2208 // rdi : the function to call |
| 2209 |
 | 2210 // NOTE(review): stale comment — no wrap_and_call flag exists in this stub; confirm intent. |
| 2211 Isolate* isolate = masm->isolate(); |
2149 Label slow, non_function, wrap, cont; | 2212 Label slow, non_function, wrap, cont; |
2150 StackArgumentsAccessor args(rsp, argc_); | 2213 int argc = argc_; |
| 2214 StackArgumentsAccessor args(rsp, argc); |
2151 | 2215 |
2152 if (NeedsChecks()) { | 2216 if (NeedsChecks()) { |
2153 // Check that the function really is a JavaScript function. | 2217 // Check that the function really is a JavaScript function. |
2154 __ JumpIfSmi(rdi, &non_function); | 2218 __ JumpIfSmi(rdi, &non_function); |
2155 | 2219 |
2156 // Goto slow case if we do not have a function. | 2220 // Goto slow case if we do not have a function. |
2157 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 2221 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
2158 __ j(not_equal, &slow); | 2222 __ j(not_equal, &slow); |
2159 | |
2160 if (RecordCallTarget()) { | |
2161 GenerateRecordCallTarget(masm); | |
2162 // Type information was updated. Because we may call Array, which | |
2163 // expects either undefined or an AllocationSite in rbx we need | |
2164 // to set rbx to undefined. | |
2165 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex); | |
2166 } | |
2167 } | 2223 } |
2168 | 2224 |
2169 // Fast-case: Just invoke the function. | 2225 // Fast-case: Just invoke the function. |
2170 ParameterCount actual(argc_); | 2226 ParameterCount actual(argc); |
2171 | 2227 |
2172 if (CallAsMethod()) { | 2228 if (CallAsMethod()) { |
2173 if (NeedsChecks()) { | 2229 if (NeedsChecks()) { |
2174 // Do not transform the receiver for strict mode functions. | 2230 EmitContinueIfStrictOrNative(masm, &cont); |
2175 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); | |
2176 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset), | |
2177 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte)); | |
2178 __ j(not_equal, &cont); | |
2179 | |
2180 // Do not transform the receiver for natives. | |
2181 // SharedFunctionInfo is already loaded into rcx. | |
2182 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset), | |
2183 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte)); | |
2184 __ j(not_equal, &cont); | |
2185 } | 2231 } |
2186 | 2232 |
2187 | |
2188 // Load the receiver from the stack. | 2233 // Load the receiver from the stack. |
2189 __ movp(rax, args.GetReceiverOperand()); | 2234 __ movp(rax, args.GetReceiverOperand()); |
2190 | 2235 |
2191 if (NeedsChecks()) { | 2236 if (NeedsChecks()) { |
2192 __ JumpIfSmi(rax, &wrap); | 2237 __ JumpIfSmi(rax, &wrap); |
2193 | 2238 |
2194 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); | 2239 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); |
2195 __ j(below, &wrap); | 2240 __ j(below, &wrap); |
2196 } else { | 2241 } else { |
2197 __ jmp(&wrap); | 2242 __ jmp(&wrap); |
2198 } | 2243 } |
2199 | 2244 |
2200 __ bind(&cont); | 2245 __ bind(&cont); |
2201 } | 2246 } |
| 2247 |
2202 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); | 2248 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); |
2203 | 2249 |
2204 if (NeedsChecks()) { | 2250 if (NeedsChecks()) { |
2205 // Slow-case: Non-function called. | 2251 // Slow-case: Non-function called. |
2206 __ bind(&slow); | 2252 __ bind(&slow); |
2207 if (RecordCallTarget()) { | 2253 EmitSlowCase(isolate, masm, &args, argc, &non_function); |
2208 // If there is a call target cache, mark it megamorphic in the | |
2209 // non-function case. MegamorphicSentinel is an immortal immovable | |
2210 // object (megamorphic symbol) so no write barrier is needed. | |
2211 __ SmiToInteger32(rdx, rdx); | |
2212 __ Move(FieldOperand(rbx, rdx, times_pointer_size, | |
2213 FixedArray::kHeaderSize), | |
2214 TypeFeedbackInfo::MegamorphicSentinel(isolate())); | |
2215 __ Integer32ToSmi(rdx, rdx); | |
2216 } | |
2217 // Check for function proxy. | |
2218 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE); | |
2219 __ j(not_equal, &non_function); | |
2220 __ PopReturnAddressTo(rcx); | |
2221 __ Push(rdi); // put proxy as additional argument under return address | |
2222 __ PushReturnAddressFrom(rcx); | |
2223 __ Set(rax, argc_ + 1); | |
2224 __ Set(rbx, 0); | |
2225 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY); | |
2226 { | |
2227 Handle<Code> adaptor = | |
2228 isolate()->builtins()->ArgumentsAdaptorTrampoline(); | |
2229 __ jmp(adaptor, RelocInfo::CODE_TARGET); | |
2230 } | |
2231 | |
2232 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead | |
2233 // of the original receiver from the call site). | |
2234 __ bind(&non_function); | |
2235 __ movp(args.GetReceiverOperand(), rdi); | |
2236 __ Set(rax, argc_); | |
2237 __ Set(rbx, 0); | |
2238 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION); | |
2239 Handle<Code> adaptor = | |
2240 isolate()->builtins()->ArgumentsAdaptorTrampoline(); | |
2241 __ Jump(adaptor, RelocInfo::CODE_TARGET); | |
2242 } | 2254 } |
2243 | 2255 |
2244 if (CallAsMethod()) { | 2256 if (CallAsMethod()) { |
2245 __ bind(&wrap); | 2257 __ bind(&wrap); |
2246 // Wrap the receiver and patch it back onto the stack. | 2258 EmitWrapCase(masm, &args, &cont); |
2247 { FrameScope frame_scope(masm, StackFrame::INTERNAL); | |
2248 __ Push(rdi); | |
2249 __ Push(rax); | |
2250 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION); | |
2251 __ Pop(rdi); | |
2252 } | |
2253 __ movp(args.GetReceiverOperand(), rax); | |
2254 __ jmp(&cont); | |
2255 } | 2259 } |
2256 } | 2260 } |
2257 | 2261 |
2258 | 2262 |
2259 void CallConstructStub::Generate(MacroAssembler* masm) { | 2263 void CallConstructStub::Generate(MacroAssembler* masm) { |
2260 // rax : number of arguments | 2264 // rax : number of arguments |
2261 // rbx : feedback vector | 2265 // rbx : feedback vector |
2262 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback | 2266 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback |
2263 // vector (Smi) | 2267 // vector (Smi) |
2264 // rdi : constructor function | 2268 // rdi : constructor function |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2315 __ bind(&non_function_call); | 2319 __ bind(&non_function_call); |
2316 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); | 2320 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); |
2317 __ bind(&do_call); | 2321 __ bind(&do_call); |
2318 // Set expected number of arguments to zero (not changing rax). | 2322 // Set expected number of arguments to zero (not changing rax). |
2319 __ Set(rbx, 0); | 2323 __ Set(rbx, 0); |
2320 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), | 2324 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(), |
2321 RelocInfo::CODE_TARGET); | 2325 RelocInfo::CODE_TARGET); |
2322 } | 2326 } |
2323 | 2327 |
2324 | 2328 |
| 2329 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) { |
| 2330 __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); |
| 2331 __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset)); |
| 2332 __ movp(vector, FieldOperand(vector, |
| 2333 SharedFunctionInfo::kFeedbackVectorOffset)); |
| 2334 } |
| 2335 |
| 2336 |
| 2337 void CallICStub::Generate(MacroAssembler* masm) { |
| 2338 // rdi - function |
| 2339 // rbx - vector |
| 2340 // rdx - slot id |
| 2341 Isolate* isolate = masm->isolate(); |
| 2342 Label extra_checks_or_miss, slow_start; |
| 2343 Label slow, non_function, wrap, cont; |
| 2344 Label have_js_function; |
| 2345 int argc = state_.arg_count(); |
| 2346 StackArgumentsAccessor args(rsp, argc); |
| 2347 ParameterCount actual(argc); |
| 2348 |
| 2349 EmitLoadTypeFeedbackVector(masm, rbx); |
| 2350 |
| 2351 // The checks. First, does rdi match the recorded monomorphic target? |
| 2352 __ SmiToInteger32(rdx, rdx); |
| 2353 __ cmpq(rdi, FieldOperand(rbx, rdx, times_pointer_size, |
| 2354 FixedArray::kHeaderSize)); |
| 2355 __ j(not_equal, &extra_checks_or_miss); |
| 2356 |
| 2357 __ bind(&have_js_function); |
| 2358 if (state_.CallAsMethod()) { |
| 2359 EmitContinueIfStrictOrNative(masm, &cont); |
| 2360 |
| 2361 // Load the receiver from the stack. |
| 2362 __ movp(rax, args.GetReceiverOperand()); |
| 2363 |
| 2364 __ JumpIfSmi(rax, &wrap); |
| 2365 |
| 2366 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx); |
| 2367 __ j(below, &wrap); |
| 2368 |
| 2369 __ bind(&cont); |
| 2370 } |
| 2371 |
| 2372 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper()); |
| 2373 |
| 2374 __ bind(&slow); |
| 2375 EmitSlowCase(isolate, masm, &args, argc, &non_function); |
| 2376 |
| 2377 if (state_.CallAsMethod()) { |
| 2378 __ bind(&wrap); |
| 2379 EmitWrapCase(masm, &args, &cont); |
| 2380 } |
| 2381 |
| 2382 __ bind(&extra_checks_or_miss); |
| 2383 Label miss; |
| 2384 |
| 2385 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size, |
| 2386 FixedArray::kHeaderSize)); |
| 2387 __ Cmp(rcx, TypeFeedbackInfo::MegamorphicSentinel(isolate)); |
| 2388 __ j(equal, &slow_start); |
| 2389 __ Cmp(rcx, TypeFeedbackInfo::UninitializedSentinel(isolate)); |
| 2390 __ j(equal, &miss); |
| 2391 |
| 2392 if (!FLAG_trace_ic) { |
| 2393 // We are going megamorphic, and we don't want to visit the runtime. |
| 2394 __ Move(FieldOperand(rbx, rdx, times_pointer_size, |
| 2395 FixedArray::kHeaderSize), |
| 2396 TypeFeedbackInfo::MegamorphicSentinel(isolate)); |
| 2397 __ jmp(&slow_start); |
| 2398 } |
| 2399 |
| 2400 // We are here because tracing is on or we are going monomorphic. |
| 2401 __ bind(&miss); |
| 2402 GenerateMiss(masm); |
| 2403 |
 | 2404 // The slow case: verify that rdi actually holds a JSFunction. |
| 2405 __ bind(&slow_start); |
| 2406 // Check that function is not a smi. |
| 2407 __ JumpIfSmi(rdi, &non_function); |
| 2408 // Check that function is a JSFunction. |
| 2409 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
| 2410 __ j(not_equal, &slow); |
| 2411 __ jmp(&have_js_function); |
| 2412 |
| 2413 // Unreachable |
| 2414 __ int3(); |
| 2415 } |
| 2416 |
| 2417 |
| 2418 void CallICStub::GenerateMiss(MacroAssembler* masm) { |
| 2419 // Get the receiver of the function from the stack; 1 ~ return address. |
| 2420 __ movp(rcx, Operand(rsp, (state_.arg_count() + 1) * kPointerSize)); |
| 2421 |
| 2422 { |
| 2423 FrameScope scope(masm, StackFrame::INTERNAL); |
| 2424 |
 | 2425 // Push the receiver, the function, and the feedback info. |
| 2426 __ Push(rcx); |
| 2427 __ Push(rdi); |
| 2428 __ Push(rbx); |
| 2429 __ Integer32ToSmi(rdx, rdx); |
| 2430 __ Push(rdx); |
| 2431 |
| 2432 // Call the entry. |
| 2433 ExternalReference miss = ExternalReference(IC_Utility(IC::kCallIC_Miss), |
| 2434 masm->isolate()); |
| 2435 __ CallExternalReference(miss, 4); |
| 2436 |
 | 2437 // Move result to rdi and exit the internal frame. |
| 2438 __ movp(rdi, rax); |
| 2439 } |
| 2440 } |
| 2441 |
| 2442 |
2325 bool CEntryStub::NeedsImmovableCode() { | 2443 bool CEntryStub::NeedsImmovableCode() { |
2326 return false; | 2444 return false; |
2327 } | 2445 } |
2328 | 2446 |
2329 | 2447 |
2330 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { | 2448 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) { |
2331 CEntryStub::GenerateAheadOfTime(isolate); | 2449 CEntryStub::GenerateAheadOfTime(isolate); |
2332 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); | 2450 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate); |
2333 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); | 2451 StubFailureTrampolineStub::GenerateAheadOfTime(isolate); |
2334 // It is important that the store buffer overflow stubs are generated first. | 2452 // It is important that the store buffer overflow stubs are generated first. |
(...skipping 2564 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4899 return_value_operand, | 5017 return_value_operand, |
4900 NULL); | 5018 NULL); |
4901 } | 5019 } |
4902 | 5020 |
4903 | 5021 |
4904 #undef __ | 5022 #undef __ |
4905 | 5023 |
4906 } } // namespace v8::internal | 5024 } } // namespace v8::internal |
4907 | 5025 |
4908 #endif // V8_TARGET_ARCH_X64 | 5026 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |