OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #if V8_TARGET_ARCH_X64 | 7 #if V8_TARGET_ARCH_X64 |
8 | 8 |
9 #include "src/bootstrapper.h" | 9 #include "src/bootstrapper.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 2126 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2137 CallFunctionNoFeedback(masm, | 2137 CallFunctionNoFeedback(masm, |
2138 arg_count(), | 2138 arg_count(), |
2139 true, | 2139 true, |
2140 CallAsMethod()); | 2140 CallAsMethod()); |
2141 | 2141 |
2142 // Unreachable. | 2142 // Unreachable. |
2143 __ int3(); | 2143 __ int3(); |
2144 } | 2144 } |
2145 | 2145 |
2146 | 2146 |
| 2147 void CallIC_RoundStub::Generate(MacroAssembler* masm) { |
| 2148 Register function = rdi; |
| 2149 Register vector = rbx; |
| 2150 Register slot = rdx; |
| 2151 |
| 2152 Register temp = rax; |
| 2153 XMMRegister xmm_temp1 = xmm1; |
| 2154 XMMRegister xmm_temp2 = xmm0; |
| 2155 Label tail, miss; |
| 2156 |
| 2157 __ SmiToInteger64(slot, slot); |
| 2158 |
| 2159 // Ensure nobody has snuck in another function. |
| 2160 __ BranchIfNotBuiltin(function, temp, kMathRound, &miss); |
| 2161 |
| 2162 if (arg_count() > 0) { |
| 2163 __ movp(temp, Operand(rsp, arg_count() * kPointerSize)); |
| 2164 Handle<Map> map = isolate()->factory()->heap_number_map(); |
| 2165 __ CheckMap(temp, map, &tail, DO_SMI_CHECK); |
| 2166 |
| 2167 __ movsd(xmm_temp1, FieldOperand(temp, HeapNumber::kValueOffset)); |
| 2168 |
| 2169 // If the number is >0, it doesn't round to -0 |
| 2170 __ xorps(xmm_temp2, xmm_temp2); |
| 2171 __ ucomisd(xmm_temp1, xmm_temp2); |
| 2172 __ j(above, &tail, Label::kNear); |
| 2173 |
| 2174 // If the number is <-.5, it doesn't round to -0 |
| 2175 static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5 |
| 2176 __ movq(temp, minus_one_half); |
| 2177 __ movq(xmm_temp2, temp); |
| 2178 __ ucomisd(xmm_temp1, xmm_temp2); |
| 2179 __ j(below, &tail, Label::kNear); |
| 2180 |
| 2181 // +0 doesn't round to -0 |
| 2182 __ movmskpd(temp, xmm_temp1); |
| 2183 __ testl(temp, Immediate(1)); |
| 2184 __ j(zero, &tail, Label::kNear); |
| 2185 |
| 2186 __ Move(FieldOperand(vector, slot, times_pointer_size, |
| 2187 FixedArray::kHeaderSize + kPointerSize), |
| 2188 Smi::FromInt(kHasReturnedMinusZeroSentinel)); |
| 2189 } |
| 2190 |
| 2191 __ bind(&tail); |
| 2192 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); |
| 2193 |
| 2194 // Unreachable. |
| 2195 __ int3(); |
| 2196 |
| 2197 __ bind(&miss); |
| 2198 GenerateMiss(masm); |
| 2199 __ jmp(&tail); |
| 2200 } |
| 2201 |
| 2202 |
| 2203 void CallIC_FloorStub::Generate(MacroAssembler* masm) { |
| 2204 Register function = rdi; |
| 2205 Register vector = rbx; |
| 2206 Register slot = rdx; |
| 2207 |
| 2208 Register temp1 = rax; |
| 2209 Register temp2 = rsi; |
| 2210 Label tail, miss; |
| 2211 |
| 2212 __ SmiToInteger64(slot, slot); |
| 2213 |
| 2214 // Ensure nobody has snuck in another function. |
| 2215 __ BranchIfNotBuiltin(function, temp1, kMathFloor, &miss); |
| 2216 |
| 2217 if (arg_count() > 0) { |
| 2218 __ movp(temp1, Operand(rsp, arg_count() * kPointerSize)); |
| 2219 Handle<Map> map = isolate()->factory()->heap_number_map(); |
| 2220 __ CheckMap(temp1, map, &tail, DO_SMI_CHECK); |
| 2221 |
| 2222 // Only -0 floors to -0. |
| 2223 __ movq(temp1, FieldOperand(temp1, HeapNumber::kValueOffset)); |
| 2224 static int64_t minus_zero = V8_INT64_C(0x8000000000000000); // -0.0 |
| 2225 __ movq(temp2, minus_zero); |
| 2226 __ cmpq(temp1, temp2); |
| 2227 __ j(not_equal, &tail); |
| 2228 |
| 2229 __ Move(FieldOperand(vector, slot, times_pointer_size, |
| 2230 FixedArray::kHeaderSize + kPointerSize), |
| 2231 Smi::FromInt(kHasReturnedMinusZeroSentinel)); |
| 2232 } |
| 2233 |
| 2234 __ bind(&tail); |
| 2235 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); |
| 2236 |
| 2237 // Unreachable. |
| 2238 __ int3(); |
| 2239 |
| 2240 __ bind(&miss); |
| 2241 GenerateMiss(masm); |
| 2242 __ jmp(&tail); |
| 2243 } |
| 2244 |
| 2245 |
| 2246 void CallIC_CeilStub::Generate(MacroAssembler* masm) { |
| 2247 Register function = rdi; |
| 2248 Register vector = rbx; |
| 2249 Register slot = rdx; |
| 2250 |
| 2251 Register temp = rax; |
| 2252 XMMRegister xmm_temp1 = xmm1; |
| 2253 XMMRegister xmm_temp2 = xmm0; |
| 2254 Label tail, miss; |
| 2255 |
| 2256 __ SmiToInteger64(slot, slot); |
| 2257 |
| 2258 // Ensure nobody has snuck in another function. |
| 2259 __ BranchIfNotBuiltin(function, temp, kMathCeil, &miss); |
| 2260 |
| 2261 if (arg_count() > 0) { |
| 2262 __ movp(temp, Operand(rsp, arg_count() * kPointerSize)); |
| 2263 Handle<Map> map = isolate()->factory()->heap_number_map(); |
| 2264 __ CheckMap(temp, map, &tail, DO_SMI_CHECK); |
| 2265 |
| 2266 __ movsd(xmm_temp1, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 2267 |
| 2268 // If the number is >0, it doesn't round to -0 |
| 2269 __ xorps(xmm_temp2, xmm_temp2); |
| 2270 __ ucomisd(xmm_temp1, xmm_temp2); |
| 2271 __ j(greater, &tail, Label::kNear); |
| 2272 |
| 2273 // If the number is <=-1, it doesn't round to -0 |
| 2274 static int64_t minus_one = V8_INT64_C(0xbff0000000000000); // -1 |
| 2275 __ movq(temp, minus_one); |
| 2276 __ movq(xmm_temp2, temp); |
| 2277 __ ucomisd(xmm_temp1, xmm_temp2); |
| 2278 __ j(less_equal, &tail, Label::kNear); |
| 2279 |
| 2280 // +0 doesn't round to -0. |
| 2281 __ movmskpd(temp, xmm_temp1); |
| 2282 __ testq(temp, Immediate(1)); |
| 2283 __ j(zero, &tail, Label::kNear); |
| 2284 |
| 2285 __ Move(FieldOperand(vector, slot, times_pointer_size, |
| 2286 FixedArray::kHeaderSize + kPointerSize), |
| 2287 Smi::FromInt(kHasReturnedMinusZeroSentinel)); |
| 2288 } |
| 2289 |
| 2290 __ bind(&tail); |
| 2291 CallFunctionNoFeedback(masm, arg_count(), true, CallAsMethod()); |
| 2292 |
| 2293 // Unreachable. |
| 2294 __ int3(); |
| 2295 |
| 2296 __ bind(&miss); |
| 2297 GenerateMiss(masm); |
| 2298 __ jmp(&tail); |
| 2299 } |
| 2300 |
| 2301 |
2147 void CallICStub::Generate(MacroAssembler* masm) { | 2302 void CallICStub::Generate(MacroAssembler* masm) { |
2148 // rdi - function | 2303 // rdi - function |
2149 // rdx - slot id | 2304 // rdx - slot id |
2150 // rbx - vector | 2305 // rbx - vector |
2151 Isolate* isolate = masm->isolate(); | 2306 Isolate* isolate = masm->isolate(); |
2152 const int with_types_offset = | 2307 const int with_types_offset = |
2153 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); | 2308 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex); |
2154 const int generic_offset = | 2309 const int generic_offset = |
2155 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); | 2310 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex); |
2156 Label extra_checks_or_miss, slow_start; | 2311 Label extra_checks_or_miss, slow_start; |
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2246 // Goto miss case if we do not have a function. | 2401 // Goto miss case if we do not have a function. |
2247 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); | 2402 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx); |
2248 __ j(not_equal, &miss); | 2403 __ j(not_equal, &miss); |
2249 | 2404 |
2250 // Make sure the function is not the Array() function, which requires special | 2405 // Make sure the function is not the Array() function, which requires special |
2251 // behavior on MISS. | 2406 // behavior on MISS. |
2252 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); | 2407 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx); |
2253 __ cmpp(rdi, rcx); | 2408 __ cmpp(rdi, rcx); |
2254 __ j(equal, &miss); | 2409 __ j(equal, &miss); |
2255 | 2410 |
| 2411 __ movp(rax, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset)); |
| 2412 __ movp(rax, FieldOperand(rax, SharedFunctionInfo::kFunctionDataOffset)); |
| 2413 __ Cmp(rax, Smi::FromInt(0)); |
| 2414 __ j(not_equal, &miss); |
| 2415 |
2256 // Update stats. | 2416 // Update stats. |
2257 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1)); | 2417 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1)); |
2258 | 2418 |
2259 // Store the function. Use a stub since we need a frame for allocation. | 2419 // Store the function. Use a stub since we need a frame for allocation. |
2260 // rbx - vector | 2420 // rbx - vector |
2261 // rdx - slot (needs to be in smi form) | 2421 // rdx - slot (needs to be in smi form) |
2262 // rdi - function | 2422 // rdi - function |
2263 { | 2423 { |
2264 FrameScope scope(masm, StackFrame::INTERNAL); | 2424 FrameScope scope(masm, StackFrame::INTERNAL); |
2265 CreateWeakCellStub create_stub(isolate); | 2425 CreateWeakCellStub create_stub(isolate); |
(...skipping 2295 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
4561 } | 4721 } |
4562 | 4722 |
4563 | 4723 |
void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
  // Trampoline: load the type feedback vector into rbx (where
  // CallIC_ArrayStub expects it) and tail-jump to the real stub.
  EmitLoadTypeFeedbackVector(masm, rbx);
  CallIC_ArrayStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
4569 | 4729 |
4570 | 4730 |
void CallIC_RoundTrampolineStub::Generate(MacroAssembler* masm) {
  // Trampoline: load the type feedback vector into rbx (where
  // CallIC_RoundStub expects it) and tail-jump to the real stub.
  EmitLoadTypeFeedbackVector(masm, rbx);
  CallIC_RoundStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
| 4736 |
| 4737 |
void CallIC_FloorTrampolineStub::Generate(MacroAssembler* masm) {
  // Trampoline: load the type feedback vector into rbx (where
  // CallIC_FloorStub expects it) and tail-jump to the real stub.
  EmitLoadTypeFeedbackVector(masm, rbx);
  CallIC_FloorStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
| 4743 |
| 4744 |
void CallIC_CeilTrampolineStub::Generate(MacroAssembler* masm) {
  // Trampoline: load the type feedback vector into rbx (where
  // CallIC_CeilStub expects it) and tail-jump to the real stub.
  EmitLoadTypeFeedbackVector(masm, rbx);
  CallIC_CeilStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}
| 4750 |
| 4751 |
4571 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { | 4752 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { |
4572 if (masm->isolate()->function_entry_hook() != NULL) { | 4753 if (masm->isolate()->function_entry_hook() != NULL) { |
4573 ProfileEntryHookStub stub(masm->isolate()); | 4754 ProfileEntryHookStub stub(masm->isolate()); |
4574 masm->CallStub(&stub); | 4755 masm->CallStub(&stub); |
4575 } | 4756 } |
4576 } | 4757 } |
4577 | 4758 |
4578 | 4759 |
4579 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { | 4760 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { |
4580 // This stub can be called from essentially anywhere, so it needs to save | 4761 // This stub can be called from essentially anywhere, so it needs to save |
(...skipping 769 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5350 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, | 5531 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg, |
5351 kStackSpace, nullptr, return_value_operand, NULL); | 5532 kStackSpace, nullptr, return_value_operand, NULL); |
5352 } | 5533 } |
5353 | 5534 |
5354 | 5535 |
5355 #undef __ | 5536 #undef __ |
5356 | 5537 |
5357 } } // namespace v8::internal | 5538 } } // namespace v8::internal |
5358 | 5539 |
5359 #endif // V8_TARGET_ARCH_X64 | 5540 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |