OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2462 matching lines...)
2473 XMMRegister xmm_scratch = xmm0; | 2473 XMMRegister xmm_scratch = xmm0; |
2474 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); | 2474 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); |
2475 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 2475 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
2476 __ xorpd(xmm_scratch, xmm_scratch); | 2476 __ xorpd(xmm_scratch, xmm_scratch); |
2477 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. | 2477 __ addsd(input_reg, xmm_scratch); // Convert -0 to +0. |
2478 __ sqrtsd(input_reg, input_reg); | 2478 __ sqrtsd(input_reg, input_reg); |
2479 } | 2479 } |
2480 | 2480 |
2481 | 2481 |
2482 void LCodeGen::DoPower(LPower* instr) { | 2482 void LCodeGen::DoPower(LPower* instr) { |
2483 Abort("Unimplemented: %s", "DoPower"); | 2483 LOperand* left = instr->InputAt(0); |
| 2484 XMMRegister left_reg = ToDoubleRegister(left); |
| 2485 ASSERT(!left_reg.is(xmm1)); |
| 2486 LOperand* right = instr->InputAt(1); |
| 2487 XMMRegister result_reg = ToDoubleRegister(instr->result()); |
| 2488 Representation exponent_type = instr->hydrogen()->right()->representation(); |
| 2489 if (exponent_type.IsDouble()) { |
| 2490 __ PrepareCallCFunction(2); |
| 2491 // Move arguments to correct registers: xmm0 and xmm1. |
| 2492 __ movsd(xmm0, left_reg); |
| 2493 ASSERT(ToDoubleRegister(right).is(xmm1)); |
| 2494 __ CallCFunction(ExternalReference::power_double_double_function(), 2); |
| 2495 } else if (exponent_type.IsInteger32()) { |
| 2496 __ PrepareCallCFunction(2); |
| 2497 // Move arguments to correct registers: xmm0 and edi (low half of rdi). |
| 2498 // On Windows the integer exponent goes in edx (low half of rdx). |
| 2499 __ movsd(xmm0, left_reg); |
| 2500 #ifdef _WIN64 |
| 2501 ASSERT(ToRegister(right).is(rdx)); |
| 2502 #else |
| 2503 ASSERT(ToRegister(right).is(rdi)); |
| 2504 #endif |
| 2505 __ CallCFunction(ExternalReference::power_double_int_function(), 2); |
| 2506 } else { |
| 2507 ASSERT(exponent_type.IsTagged()); |
| 2508 CpuFeatures::Scope scope(SSE2); |
| 2509 Register right_reg = ToRegister(right); |
| 2510 |
| 2511 Label non_smi, call; |
| 2512 __ JumpIfNotSmi(right_reg, &non_smi); |
| 2513 __ SmiToInteger32(right_reg, right_reg); |
| 2514 __ cvtlsi2sd(xmm1, right_reg); |
| 2515 __ jmp(&call); |
| 2516 |
| 2517 __ bind(&non_smi); |
| 2518 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, kScratchRegister); |
| 2519 DeoptimizeIf(not_equal, instr->environment()); |
| 2520 __ movsd(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset)); |
| 2521 |
| 2522 __ bind(&call); |
| 2523 __ PrepareCallCFunction(2); |
| 2524 // Move arguments to correct registers xmm0 and xmm1. |
| 2525 __ movsd(xmm0, left_reg); |
| 2526 // Right argument is already in xmm1. |
| 2527 __ CallCFunction(ExternalReference::power_double_double_function(), 2); |
| 2528 } |
| 2529 // Return value is in xmm0. |
| 2530 __ movsd(result_reg, xmm0); |
2484 } | 2531 } |
2485 | 2532 |
2486 | 2533 |
2487 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { | 2534 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { |
2488 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2535 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
2489 TranscendentalCacheStub stub(TranscendentalCache::LOG, | 2536 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
2490 TranscendentalCacheStub::UNTAGGED); | 2537 TranscendentalCacheStub::UNTAGGED); |
2491 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2538 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
2492 } | 2539 } |
2493 | 2540 |
(...skipping 1076 matching lines...)
3570 RegisterEnvironmentForDeoptimization(environment); | 3617 RegisterEnvironmentForDeoptimization(environment); |
3571 ASSERT(osr_pc_offset_ == -1); | 3618 ASSERT(osr_pc_offset_ == -1); |
3572 osr_pc_offset_ = masm()->pc_offset(); | 3619 osr_pc_offset_ = masm()->pc_offset(); |
3573 } | 3620 } |
3574 | 3621 |
3575 #undef __ | 3622 #undef __ |
3576 | 3623 |
3577 } } // namespace v8::internal | 3624 } } // namespace v8::internal |
3578 | 3625 |
3579 #endif // V8_TARGET_ARCH_X64 | 3626 #endif // V8_TARGET_ARCH_X64 |
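A note on the unchanged sqrt context above: the xorpd/addsd pair adds +0.0 to the input before sqrtsd. Under IEEE 754 round-to-nearest, -0.0 + (+0.0) is +0.0, whereas sqrtsd(-0.0) would return -0.0, so the addsd is what canonicalizes the sign. A standalone C++ illustration of the same arithmetic (not V8 code, just the IEEE behaviour):

  #include <cmath>
  #include <cstdio>

  int main() {
    double minus_zero = -0.0;
    // sqrt(-0.0) is -0.0 under IEEE 754, so the sign bit would survive.
    std::printf("%g\n", std::sqrt(minus_zero));        // prints -0
    // Adding +0.0 first turns -0.0 into +0.0 (round-to-nearest),
    // mirroring the xorpd/addsd pair in the generated code.
    std::printf("%g\n", std::sqrt(minus_zero + 0.0));  // prints 0
    return 0;
  }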
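On the new DoPower code: the Double and Integer32 paths both leave generated code through CallCFunction, with the base in xmm0, the exponent either in xmm1 or in the second integer argument register (edi on System V, edx on Win64), and the result read back from xmm0. For reference, a minimal sketch of the C fallbacks reached via ExternalReference::power_double_double_function() and power_double_int_function(); the bodies below (pow and a square-and-multiply loop) are illustrative assumptions, not the definitions actually used in the tree:

  #include <cmath>

  // Reached via ExternalReference::power_double_double_function():
  // base arrives in xmm0, exponent in xmm1, result goes back in xmm0.
  extern "C" double power_double_double(double base, double exponent) {
    return std::pow(base, exponent);
  }

  // Reached via ExternalReference::power_double_int_function():
  // base in xmm0, exponent in edi (rdi) on System V or edx (rdx) on Win64.
  extern "C" double power_double_int(double base, int exponent) {
    // Exponentiation by squaring; widen first so negating INT_MIN is defined.
    long long e = exponent;
    unsigned long long n = e < 0 ? -e : e;
    double b = e < 0 ? 1.0 / base : base;
    double result = 1.0;
    while (n != 0) {
      if (n & 1) result *= b;
      b *= b;
      n >>= 1;
    }
    return result;
  }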