| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1182 matching lines...) |
| 1193 case Token::MUL: | 1193 case Token::MUL: |
| 1194 __ mulsd(left, right); | 1194 __ mulsd(left, right); |
| 1195 break; | 1195 break; |
| 1196 case Token::DIV: | 1196 case Token::DIV: |
| 1197 __ divsd(left, right); | 1197 __ divsd(left, right); |
| 1198 break; | 1198 break; |
| 1199 case Token::MOD: | 1199 case Token::MOD: |
| 1200 __ PrepareCallCFunction(2); | 1200 __ PrepareCallCFunction(2); |
| 1201 __ movsd(xmm0, left); | 1201 __ movsd(xmm0, left); |
| 1202 ASSERT(right.is(xmm1)); | 1202 ASSERT(right.is(xmm1)); |
| 1203 __ CallCFunction(ExternalReference::double_fp_operation(Token::MOD), 2); | 1203 __ CallCFunction( |
| 1204 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); |
| 1204 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 1205 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 1205 __ movsd(result, xmm0); | 1206 __ movsd(result, xmm0); |
| 1206 break; | 1207 break; |
| 1207 default: | 1208 default: |
| 1208 UNREACHABLE(); | 1209 UNREACHABLE(); |
| 1209 break; | 1210 break; |
| 1210 } | 1211 } |
| 1211 } | 1212 } |
| 1212 | 1213 |
| 1213 | 1214 |
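Note on the MOD case above: x64 SSE2 has no instruction for floating-point remainder, so the generated code moves the operands into xmm0/xmm1 and calls into C through CallCFunction, then reloads rsi because the call clobbers the context register. A minimal sketch of the kind of C fallback that double_fp_operation(Token::MOD) resolves to, assuming the signature implied by the argument setup (the function name here is illustrative, not the actual V8 symbol):

    #include <cmath>

    // Left operand arrives in xmm0, right in xmm1; the result is returned
    // in xmm0 per the standard calling convention.
    double mod_two_doubles(double left, double right) {
      return std::fmod(left, right);
    }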
| (...skipping 1400 matching lines...) |
| 2614 XMMRegister left_reg = ToDoubleRegister(left); | 2615 XMMRegister left_reg = ToDoubleRegister(left); |
| 2615 ASSERT(!left_reg.is(xmm1)); | 2616 ASSERT(!left_reg.is(xmm1)); |
| 2616 LOperand* right = instr->InputAt(1); | 2617 LOperand* right = instr->InputAt(1); |
| 2617 XMMRegister result_reg = ToDoubleRegister(instr->result()); | 2618 XMMRegister result_reg = ToDoubleRegister(instr->result()); |
| 2618 Representation exponent_type = instr->hydrogen()->right()->representation(); | 2619 Representation exponent_type = instr->hydrogen()->right()->representation(); |
| 2619 if (exponent_type.IsDouble()) { | 2620 if (exponent_type.IsDouble()) { |
| 2620 __ PrepareCallCFunction(2); | 2621 __ PrepareCallCFunction(2); |
| 2621 // Move arguments to correct registers. | 2622 // Move arguments to correct registers. |
| 2622 __ movsd(xmm0, left_reg); | 2623 __ movsd(xmm0, left_reg); |
| 2623 ASSERT(ToDoubleRegister(right).is(xmm1)); | 2624 ASSERT(ToDoubleRegister(right).is(xmm1)); |
| 2624 __ CallCFunction(ExternalReference::power_double_double_function(), 2); | 2625 __ CallCFunction( |
| | 2626 ExternalReference::power_double_double_function(isolate()), 2); |
| 2625 } else if (exponent_type.IsInteger32()) { | 2627 } else if (exponent_type.IsInteger32()) { |
| 2626 __ PrepareCallCFunction(2); | 2628 __ PrepareCallCFunction(2); |
| 2627 // Move arguments to correct registers: xmm0 and edi (not rdi). | 2629 // Move arguments to correct registers: xmm0 and edi (not rdi). |
| 2628 // On Windows, the registers are xmm0 and edx. | 2630 // On Windows, the registers are xmm0 and edx. |
| 2629 __ movsd(xmm0, left_reg); | 2631 __ movsd(xmm0, left_reg); |
| 2630 #ifdef _WIN64 | 2632 #ifdef _WIN64 |
| 2631 ASSERT(ToRegister(right).is(rdx)); | 2633 ASSERT(ToRegister(right).is(rdx)); |
| 2632 #else | 2634 #else |
| 2633 ASSERT(ToRegister(right).is(rdi)); | 2635 ASSERT(ToRegister(right).is(rdi)); |
| 2634 #endif | 2636 #endif |
| 2635 __ CallCFunction(ExternalReference::power_double_int_function(), 2); | 2637 __ CallCFunction( |
| | 2638 ExternalReference::power_double_int_function(isolate()), 2); |
| 2636 } else { | 2639 } else { |
| 2637 ASSERT(exponent_type.IsTagged()); | 2640 ASSERT(exponent_type.IsTagged()); |
| 2638 CpuFeatures::Scope scope(SSE2); | 2641 CpuFeatures::Scope scope(SSE2); |
| 2639 Register right_reg = ToRegister(right); | 2642 Register right_reg = ToRegister(right); |
| 2640 | 2643 |
| 2641 Label non_smi, call; | 2644 Label non_smi, call; |
| 2642 __ JumpIfNotSmi(right_reg, &non_smi); | 2645 __ JumpIfNotSmi(right_reg, &non_smi); |
| 2643 __ SmiToInteger32(right_reg, right_reg); | 2646 __ SmiToInteger32(right_reg, right_reg); |
| 2644 __ cvtlsi2sd(xmm1, right_reg); | 2647 __ cvtlsi2sd(xmm1, right_reg); |
| 2645 __ jmp(&call); | 2648 __ jmp(&call); |
| 2646 | 2649 |
| 2647 __ bind(&non_smi); | 2650 __ bind(&non_smi); |
| 2648 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, kScratchRegister); | 2651 __ CmpObjectType(right_reg, HEAP_NUMBER_TYPE, kScratchRegister); |
| 2649 DeoptimizeIf(not_equal, instr->environment()); | 2652 DeoptimizeIf(not_equal, instr->environment()); |
| 2650 __ movsd(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset)); | 2653 __ movsd(xmm1, FieldOperand(right_reg, HeapNumber::kValueOffset)); |
| 2651 | 2654 |
| 2652 __ bind(&call); | 2655 __ bind(&call); |
| 2653 __ PrepareCallCFunction(2); | 2656 __ PrepareCallCFunction(2); |
| 2654 // Move arguments to correct registers xmm0 and xmm1. | 2657 // Move arguments to correct registers xmm0 and xmm1. |
| 2655 __ movsd(xmm0, left_reg); | 2658 __ movsd(xmm0, left_reg); |
| 2656 // Right argument is already in xmm1. | 2659 // Right argument is already in xmm1. |
| 2657 __ CallCFunction(ExternalReference::power_double_double_function(), 2); | 2660 __ CallCFunction( |
| | 2661 ExternalReference::power_double_double_function(isolate()), 2); |
| 2658 } | 2662 } |
| 2659 // Return value is in xmm0. | 2663 // Return value is in xmm0. |
| 2660 __ movsd(result_reg, xmm0); | 2664 __ movsd(result_reg, xmm0); |
| 2661 // Restore context register. | 2665 // Restore context register. |
| 2662 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 2666 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 2663 } | 2667 } |
| 2664 | 2668 |
| 2665 | 2669 |
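DoPower above dispatches on the exponent's representation: a double exponent calls the double-double helper directly; an Integer32 exponent calls the double-int helper, with the int in edi on System V but edx on Win64, because Win64 assigns mixed arguments by position (slot one is xmm0, slot two is rdx); a tagged exponent is unboxed first, deoptimizing unless it is a Smi or a HeapNumber. A hedged sketch of the two C helpers these CallCFunction sites target; the bodies here are simplified placeholders and omit whatever special-case handling the real V8 functions perform:

    #include <cmath>

    // Both doubles arrive in xmm0/xmm1; the int exponent arrives in the
    // platform's designated integer argument register.
    double power_double_double(double base, double exponent) {
      return std::pow(base, exponent);
    }

    double power_double_int(double base, int exponent) {
      return std::pow(base, static_cast<double>(exponent));
    }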
| 2666 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { | 2670 void LCodeGen::DoMathLog(LUnaryMathOperation* instr) { |
| 2667 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 2671 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| (...skipping 1151 matching lines...) |
| 3819 RegisterEnvironmentForDeoptimization(environment); | 3823 RegisterEnvironmentForDeoptimization(environment); |
| 3820 ASSERT(osr_pc_offset_ == -1); | 3824 ASSERT(osr_pc_offset_ == -1); |
| 3821 osr_pc_offset_ = masm()->pc_offset(); | 3825 osr_pc_offset_ = masm()->pc_offset(); |
| 3822 } | 3826 } |
| 3823 | 3827 |
| 3824 #undef __ | 3828 #undef __ |
| 3825 | 3829 |
| 3826 } } // namespace v8::internal | 3830 } } // namespace v8::internal |
| 3827 | 3831 |
| 3828 #endif // V8_TARGET_ARCH_X64 | 3832 #endif // V8_TARGET_ARCH_X64 |
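The recurring change across all three hunks is the same one-liner: each ExternalReference factory (double_fp_operation, power_double_double_function, power_double_int_function) now takes the Isolate explicitly via isolate(), presumably so the reference can be resolved per isolate rather than from process-global state. A toy model of that signature shape, using simplified stand-in types rather than the real V8 declarations:

    // Stand-in for v8::internal::Isolate.
    struct Isolate {};

    struct ExternalReference {
      const void* address;
      // Before: the factory consulted a process-global isolate.
      // After: the caller threads its Isolate* through explicitly.
      static ExternalReference double_fp_operation(int op, Isolate* isolate) {
        (void)op;
        (void)isolate;  // a real implementation would resolve a per-isolate stub
        return ExternalReference{nullptr};
      }
    };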