| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 2304 matching lines...) | |
| 2315 | 2315 |
| 2316 // Smi tag and return. | 2316 // Smi tag and return. |
| 2317 __ Integer32ToSmi(rax, rax); | 2317 __ Integer32ToSmi(rax, rax); |
| 2318 __ bind(&smi); | 2318 __ bind(&smi); |
| 2319 __ ret(2 * kPointerSize); | 2319 __ ret(2 * kPointerSize); |
| 2320 | 2320 |
| 2321 // Check if the argument is < 2^kMantissaBits. | 2321 // Check if the argument is < 2^kMantissaBits. |
| 2322 Label already_round; | 2322 Label already_round; |
| 2323 __ bind(&conversion_failure); | 2323 __ bind(&conversion_failure); |
| 2324 int64_t kTwoMantissaBits = V8_INT64_C(0x4330000000000000); | 2324 int64_t kTwoMantissaBits = V8_INT64_C(0x4330000000000000); |
| 2325 __ movq(rbx, kTwoMantissaBits, RelocInfo::NONE64); | 2325 __ movq(rbx, kTwoMantissaBits); |
| 2326 __ movq(xmm1, rbx); | 2326 __ movq(xmm1, rbx); |
| 2327 __ ucomisd(xmm0, xmm1); | 2327 __ ucomisd(xmm0, xmm1); |
| 2328 __ j(above_equal, &already_round); | 2328 __ j(above_equal, &already_round); |
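The branch above relies on `0x4330000000000000` being the bit pattern of the double 2^52, the point past which every representable double is already an integer. A minimal plain-C++ sanity check of that assumption (the variable names here are illustrative, not part of the stub):

```cpp
#include <cstdint>
#include <cstring>
#include <cassert>

int main() {
  const uint64_t kTwoMantissaBits = 0x4330000000000000ULL;
  double threshold;
  // Reinterpret the integer bits as a double, which is what
  // "movq xmm1, rbx" does in the stub.
  std::memcpy(&threshold, &kTwoMantissaBits, sizeof threshold);
  assert(threshold == 4503599627370496.0);  // 2^52
  // At 2^52 the spacing between adjacent doubles is exactly 1.0, so any
  // argument >= 2^52 has no fractional part and can be returned unchanged.
  return 0;
}
```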
| 2329 | 2329 |
| 2330 // Save a copy of the argument. | 2330 // Save a copy of the argument. |
| 2331 __ movaps(xmm2, xmm0); | 2331 __ movaps(xmm2, xmm0); |
| 2332 | 2332 |
| 2333 // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits. | 2333 // Compute (argument + 2^kMantissaBits) - 2^kMantissaBits. |
| 2334 __ addsd(xmm0, xmm1); | 2334 __ addsd(xmm0, xmm1); |
| 2335 __ subsd(xmm0, xmm1); | 2335 __ subsd(xmm0, xmm1); |
| 2336 | 2336 |
| 2337 // Compare the argument and the tentative result to get the right mask: | 2337 // Compare the argument and the tentative result to get the right mask: |
| 2338 // if xmm2 < xmm0: | 2338 // if xmm2 < xmm0: |
| 2339 // xmm2 = 1...1 | 2339 // xmm2 = 1...1 |
| 2340 // else: | 2340 // else: |
| 2341 // xmm2 = 0...0 | 2341 // xmm2 = 0...0 |
| 2342 __ cmpltsd(xmm2, xmm0); | 2342 __ cmpltsd(xmm2, xmm0); |
| 2343 | 2343 |
| 2344 // Subtract 1 if the argument was less than the tentative result. | 2344 // Subtract 1 if the argument was less than the tentative result. |
| 2345 int64_t kOne = V8_INT64_C(0x3ff0000000000000); | 2345 int64_t kOne = V8_INT64_C(0x3ff0000000000000); |
| 2346 __ movq(rbx, kOne, RelocInfo::NONE64); | 2346 __ movq(rbx, kOne); |
| 2347 __ movq(xmm1, rbx); | 2347 __ movq(xmm1, rbx); |
| 2348 __ andpd(xmm1, xmm2); | 2348 __ andpd(xmm1, xmm2); |
| 2349 __ subsd(xmm0, xmm1); | 2349 __ subsd(xmm0, xmm1); |
| 2350 | 2350 |
| 2351 // Return a new heap number. | 2351 // Return a new heap number. |
| 2352 __ AllocateHeapNumber(rax, rbx, &slow); | 2352 __ AllocateHeapNumber(rax, rbx, &slow); |
| 2353 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); | 2353 __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm0); |
| 2354 __ ret(2 * kPointerSize); | 2354 __ ret(2 * kPointerSize); |
| 2355 | 2355 |
| 2356 // Return the argument (when it's an already round heap number). | 2356 // Return the argument (when it's an already round heap number). |
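The addsd/subsd round trip plus the cmpltsd/andpd/subsd correction implements floor for arguments below 2^52. A minimal sketch of the same trick in plain C++ (the function name is illustrative, and it assumes a non-negative argument, default round-to-nearest, and no fast-math folding of the add/subtract pair):

```cpp
#include <cstdint>
#include <cstring>
#include <cassert>

double FloorViaMantissaTrick(double x) {
  // Assumes 0 <= x < 2^52; the stub branches to already_round otherwise.
  const uint64_t kTwoMantissaBitsRepr = 0x4330000000000000ULL;  // bits of 2^52
  double two52;
  std::memcpy(&two52, &kTwoMantissaBitsRepr, sizeof two52);
  // addsd / subsd: forces x onto the nearest integer, possibly rounding up.
  double tentative = (x + two52) - two52;
  // cmpltsd builds an all-ones mask when x < tentative; andpd turns that
  // mask into 1.0 (or 0.0); subsd removes the over-rounding.
  if (x < tentative) tentative -= 1.0;
  return tentative;
}

int main() {
  assert(FloorViaMantissaTrick(2.7) == 2.0);
  assert(FloorViaMantissaTrick(3.0) == 3.0);
  return 0;
}
```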
| (...skipping 83 matching lines...) | |
| 2440 // Check if the argument is a heap number and load its value. | 2440 // Check if the argument is a heap number and load its value. |
| 2441 __ bind(&not_smi); | 2441 __ bind(&not_smi); |
| 2442 __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK); | 2442 __ CheckMap(rax, factory()->heap_number_map(), &slow, DONT_DO_SMI_CHECK); |
| 2443 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset)); | 2443 __ movq(rbx, FieldOperand(rax, HeapNumber::kValueOffset)); |
| 2444 | 2444 |
| 2445 // Check the sign of the argument. If the argument is positive, | 2445 // Check the sign of the argument. If the argument is positive, |
| 2446 // just return it. | 2446 // just return it. |
| 2447 Label negative_sign; | 2447 Label negative_sign; |
| 2448 const int sign_mask_shift = | 2448 const int sign_mask_shift = |
| 2449 (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte; | 2449 (HeapNumber::kExponentOffset - HeapNumber::kValueOffset) * kBitsPerByte; |
| 2450 __ movq(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift, | 2450 __ Set(rdi, static_cast<int64_t>(HeapNumber::kSignMask) << sign_mask_shift); |
| 2451 RelocInfo::NONE64); | |
| 2452 __ testq(rbx, rdi); | 2451 __ testq(rbx, rdi); |
| 2453 __ j(not_zero, &negative_sign); | 2452 __ j(not_zero, &negative_sign); |
| 2454 __ ret(2 * kPointerSize); | 2453 __ ret(2 * kPointerSize); |
| 2455 | 2454 |
| 2456 // If the argument is negative, clear the sign, and return a new | 2455 // If the argument is negative, clear the sign, and return a new |
| 2457 // number. We still have the sign mask in rdi. | 2456 // number. We still have the sign mask in rdi. |
| 2458 __ bind(&negative_sign); | 2457 __ bind(&negative_sign); |
| 2459 __ xor_(rbx, rdi); | 2458 __ xor_(rbx, rdi); |
| 2460 __ AllocateHeapNumber(rax, rdx, &slow); | 2459 __ AllocateHeapNumber(rax, rdx, &slow); |
| 2461 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx); | 2460 __ movq(FieldOperand(rax, HeapNumber::kValueOffset), rbx); |
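The abs path above tests the sign bit of the heap number's 64-bit payload and, when set, clears it with an XOR before storing the result into a freshly allocated heap number. A plain-C++ sketch of that bit manipulation (the function name is illustrative, and the 64-bit constant stands in for HeapNumber::kSignMask shifted into the high word, i.e. the value loaded into rdi above):

```cpp
#include <cstdint>
#include <cstring>
#include <cassert>

double AbsViaSignMask(double value) {
  const uint64_t kSignMask = 0x8000000000000000ULL;  // IEEE-754 sign bit
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof bits);  // like loading HeapNumber::kValueOffset into rbx
  if ((bits & kSignMask) != 0) {            // testq rbx, rdi
    bits ^= kSignMask;                      // xor_(rbx, rdi) clears the sign
    std::memcpy(&value, &bits, sizeof value);
  }
  return value;                             // positive arguments are returned as-is
}

int main() {
  assert(AbsViaSignMask(-1.5) == 1.5);
  assert(AbsViaSignMask(2.0) == 2.0);
  return 0;
}
```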
| (...skipping 717 matching lines...) | |
| 3179 // ----------------------------------- | 3178 // ----------------------------------- |
| 3180 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric); | 3179 TailCallBuiltin(masm, Builtins::kKeyedLoadIC_MissForceGeneric); |
| 3181 } | 3180 } |
| 3182 | 3181 |
| 3183 | 3182 |
| 3184 #undef __ | 3183 #undef __ |
| 3185 | 3184 |
| 3186 } } // namespace v8::internal | 3185 } } // namespace v8::internal |
| 3187 | 3186 |
| 3188 #endif // V8_TARGET_ARCH_X64 | 3187 #endif // V8_TARGET_ARCH_X64 |