OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1814 matching lines...)
1825 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; | 1825 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; |
1826 XMMRegister left_reg = ToDoubleRegister(left); | 1826 XMMRegister left_reg = ToDoubleRegister(left); |
1827 XMMRegister right_reg = ToDoubleRegister(right); | 1827 XMMRegister right_reg = ToDoubleRegister(right); |
1828 __ ucomisd(left_reg, right_reg); | 1828 __ ucomisd(left_reg, right_reg); |
1829 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN. | 1829 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN. |
1830 __ j(equal, &check_zero, Label::kNear); // left == right. | 1830 __ j(equal, &check_zero, Label::kNear); // left == right. |
1831 __ j(condition, &return_left, Label::kNear); | 1831 __ j(condition, &return_left, Label::kNear); |
1832 __ jmp(&return_right, Label::kNear); | 1832 __ jmp(&return_right, Label::kNear); |
1833 | 1833 |
1834 __ bind(&check_zero); | 1834 __ bind(&check_zero); |
1835 XMMRegister xmm_scratch = xmm0; | 1835 XMMRegister xmm_scratch = double_scratch0(); |
1836 __ xorps(xmm_scratch, xmm_scratch); | 1836 __ xorps(xmm_scratch, xmm_scratch); |
1837 __ ucomisd(left_reg, xmm_scratch); | 1837 __ ucomisd(left_reg, xmm_scratch); |
1838 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. | 1838 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. |
1839 // At this point, both left and right are either 0 or -0. | 1839 // At this point, both left and right are either 0 or -0. |
1840 if (operation == HMathMinMax::kMathMin) { | 1840 if (operation == HMathMinMax::kMathMin) { |
1841 __ orpd(left_reg, right_reg); | 1841 __ orpd(left_reg, right_reg); |
1842 } else { | 1842 } else { |
1843 // Since we operate on +0 and/or -0, addsd and andsd have the same effect. | 1843 // Since we operate on +0 and/or -0, addsd and andsd have the same effect. |
1844 __ addsd(left_reg, right_reg); | 1844 __ addsd(left_reg, right_reg); |
1845 } | 1845 } |
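
Note on the check_zero block above (an illustrative aside, not part of the patch): once both operands are known to be some zero, only the sign bit distinguishes them. OR-ing the bit patterns (orpd) makes -0 win, which is what min needs; adding the two zeros (addsd) yields +0 unless both are -0, which is what max needs. A minimal host-side C++ sketch of the same sign-bit behaviour:

    // Sketch of the +/-0 handling in DoMathMinMax; plain C++, not V8 code.
    #include <cmath>
    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    static double or_bits(double a, double b) {  // analogue of orpd
      uint64_t x, y;
      std::memcpy(&x, &a, 8);
      std::memcpy(&y, &b, 8);
      uint64_t r = x | y;                        // merges the sign bits
      double d;
      std::memcpy(&d, &r, 8);
      return d;
    }

    int main() {
      double pz = 0.0, nz = -0.0;
      double min_like = or_bits(pz, nz);  // -0, as Math.min(+0, -0) requires
      double max_like = pz + nz;          // +0, as Math.max(+0, -0) requires
      std::printf("min sign=%d, max sign=%d\n",
                  (int)std::signbit(min_like), (int)std::signbit(max_like));
      return 0;
    }
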
(...skipping 25 matching lines...)
1871 break; | 1871 break; |
1872 case Token::MUL: | 1872 case Token::MUL: |
1873 __ mulsd(left, right); | 1873 __ mulsd(left, right); |
1874 break; | 1874 break; |
1875 case Token::DIV: | 1875 case Token::DIV: |
1876 __ divsd(left, right); | 1876 __ divsd(left, right); |
1877 // Don't delete this mov. It may improve performance on some CPUs, | 1877 // Don't delete this mov. It may improve performance on some CPUs, |
1878 // when there is a mulsd depending on the result | 1878 // when there is a mulsd depending on the result |
1879 __ movaps(left, left); | 1879 __ movaps(left, left); |
1880 break; | 1880 break; |
1881 case Token::MOD: | 1881 case Token::MOD: { |
| 1882 XMMRegister xmm_scratch = double_scratch0(); |
1882 __ PrepareCallCFunction(2); | 1883 __ PrepareCallCFunction(2); |
1883 __ movaps(xmm0, left); | 1884 __ movaps(xmm_scratch, left); |
1884 ASSERT(right.is(xmm1)); | 1885 ASSERT(right.is(xmm1)); |
1885 __ CallCFunction( | 1886 __ CallCFunction( |
1886 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); | 1887 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); |
1887 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 1888 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
1888 __ movaps(result, xmm0); | 1889 __ movaps(result, xmm_scratch); |
1889 break; | 1890 break; |
| 1891 } |
1890 default: | 1892 default: |
1891 UNREACHABLE(); | 1893 UNREACHABLE(); |
1892 break; | 1894 break; |
1893 } | 1895 } |
1894 } | 1896 } |
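
Note on the Token::MOD case: the double modulus is not emitted inline but routed through CallCFunction, and the x64 calling conventions expect the two double arguments in xmm0 and xmm1 (hence the ASSERT that right is xmm1, and the move of left into the scratch register, which presumably relies on double_scratch0() resolving to xmm0 here). Conceptually the callee is an fmod-style helper; the following is a hypothetical stand-in with an illustrative name, not the actual V8 runtime entry:

    #include <cmath>

    // Hypothetical stand-in for the C function reached via
    // ExternalReference::double_fp_operation(Token::MOD, isolate()).
    extern "C" double illustrative_double_mod(double dividend, double divisor) {
      // Matches the JS % operator on doubles: NaN for a zero divisor or a
      // non-finite dividend, otherwise the truncated-division remainder
      // carrying the sign of the dividend, which is what std::fmod gives.
      return std::fmod(dividend, divisor);
    }
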
1895 | 1897 |
1896 | 1898 |
1897 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1899 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
1898 ASSERT(ToRegister(instr->left()).is(rdx)); | 1900 ASSERT(ToRegister(instr->left()).is(rdx)); |
1899 ASSERT(ToRegister(instr->right()).is(rax)); | 1901 ASSERT(ToRegister(instr->right()).is(rax)); |
(...skipping 55 matching lines...)
1955 __ testl(reg, reg); | 1957 __ testl(reg, reg); |
1956 EmitBranch(instr, not_zero); | 1958 EmitBranch(instr, not_zero); |
1957 } else if (r.IsSmi()) { | 1959 } else if (r.IsSmi()) { |
1958 ASSERT(!info()->IsStub()); | 1960 ASSERT(!info()->IsStub()); |
1959 Register reg = ToRegister(instr->value()); | 1961 Register reg = ToRegister(instr->value()); |
1960 __ testq(reg, reg); | 1962 __ testq(reg, reg); |
1961 EmitBranch(instr, not_zero); | 1963 EmitBranch(instr, not_zero); |
1962 } else if (r.IsDouble()) { | 1964 } else if (r.IsDouble()) { |
1963 ASSERT(!info()->IsStub()); | 1965 ASSERT(!info()->IsStub()); |
1964 XMMRegister reg = ToDoubleRegister(instr->value()); | 1966 XMMRegister reg = ToDoubleRegister(instr->value()); |
1965 __ xorps(xmm0, xmm0); | 1967 XMMRegister xmm_scratch = double_scratch0(); |
1966 __ ucomisd(reg, xmm0); | 1968 __ xorps(xmm_scratch, xmm_scratch); |
| 1969 __ ucomisd(reg, xmm_scratch); |
1967 EmitBranch(instr, not_equal); | 1970 EmitBranch(instr, not_equal); |
1968 } else { | 1971 } else { |
1969 ASSERT(r.IsTagged()); | 1972 ASSERT(r.IsTagged()); |
1970 Register reg = ToRegister(instr->value()); | 1973 Register reg = ToRegister(instr->value()); |
1971 HType type = instr->hydrogen()->value()->type(); | 1974 HType type = instr->hydrogen()->value()->type(); |
1972 if (type.IsBoolean()) { | 1975 if (type.IsBoolean()) { |
1973 ASSERT(!info()->IsStub()); | 1976 ASSERT(!info()->IsStub()); |
1974 __ CompareRoot(reg, Heap::kTrueValueRootIndex); | 1977 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
1975 EmitBranch(instr, equal); | 1978 EmitBranch(instr, equal); |
1976 } else if (type.IsSmi()) { | 1979 } else if (type.IsSmi()) { |
1977 ASSERT(!info()->IsStub()); | 1980 ASSERT(!info()->IsStub()); |
1978 __ SmiCompare(reg, Smi::FromInt(0)); | 1981 __ SmiCompare(reg, Smi::FromInt(0)); |
1979 EmitBranch(instr, not_equal); | 1982 EmitBranch(instr, not_equal); |
1980 } else if (type.IsJSArray()) { | 1983 } else if (type.IsJSArray()) { |
1981 ASSERT(!info()->IsStub()); | 1984 ASSERT(!info()->IsStub()); |
1982 EmitBranch(instr, no_condition); | 1985 EmitBranch(instr, no_condition); |
1983 } else if (type.IsHeapNumber()) { | 1986 } else if (type.IsHeapNumber()) { |
1984 ASSERT(!info()->IsStub()); | 1987 ASSERT(!info()->IsStub()); |
1985 __ xorps(xmm0, xmm0); | 1988 XMMRegister xmm_scratch = double_scratch0(); |
1986 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); | 1989 __ xorps(xmm_scratch, xmm_scratch); |
| 1990 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); |
1987 EmitBranch(instr, not_equal); | 1991 EmitBranch(instr, not_equal); |
1988 } else if (type.IsString()) { | 1992 } else if (type.IsString()) { |
1989 ASSERT(!info()->IsStub()); | 1993 ASSERT(!info()->IsStub()); |
1990 __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0)); | 1994 __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0)); |
1991 EmitBranch(instr, not_equal); | 1995 EmitBranch(instr, not_equal); |
1992 } else { | 1996 } else { |
1993 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); | 1997 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
1994 // Avoid deopts in the case where we've never executed this path before. | 1998 // Avoid deopts in the case where we've never executed this path before. |
1995 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); | 1999 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); |
1996 | 2000 |
(...skipping 60 matching lines...)
2057 // Symbol value -> true. | 2061 // Symbol value -> true. |
2058 __ CmpInstanceType(map, SYMBOL_TYPE); | 2062 __ CmpInstanceType(map, SYMBOL_TYPE); |
2059 __ j(equal, instr->TrueLabel(chunk_)); | 2063 __ j(equal, instr->TrueLabel(chunk_)); |
2060 } | 2064 } |
2061 | 2065 |
2062 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { | 2066 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { |
2063 // heap number -> false iff +0, -0, or NaN. | 2067 // heap number -> false iff +0, -0, or NaN. |
2064 Label not_heap_number; | 2068 Label not_heap_number; |
2065 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | 2069 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
2066 __ j(not_equal, &not_heap_number, Label::kNear); | 2070 __ j(not_equal, &not_heap_number, Label::kNear); |
2067 __ xorps(xmm0, xmm0); | 2071 XMMRegister xmm_scratch = double_scratch0(); |
2068 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); | 2072 __ xorps(xmm_scratch, xmm_scratch); |
| 2073 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); |
2069 __ j(zero, instr->FalseLabel(chunk_)); | 2074 __ j(zero, instr->FalseLabel(chunk_)); |
2070 __ jmp(instr->TrueLabel(chunk_)); | 2075 __ jmp(instr->TrueLabel(chunk_)); |
2071 __ bind(&not_heap_number); | 2076 __ bind(&not_heap_number); |
2072 } | 2077 } |
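
Note on the HEAP_NUMBER branch: ucomisd sets ZF both when the operands compare equal and when the comparison is unordered (NaN), so the single j(zero, FalseLabel) sends +0, -0 and NaN to the false label in one branch. The scalar equivalent, as a minimal sketch (not V8 code):

    #include <cmath>

    // Sketch of ToBoolean for a heap number: false iff +0, -0 or NaN.
    static bool heap_number_to_boolean(double value) {
      return value != 0.0 && !std::isnan(value);
    }
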
2073 | 2078 |
2074 if (!expected.IsGeneric()) { | 2079 if (!expected.IsGeneric()) { |
2075 // We've seen something for the first time -> deopt. | 2080 // We've seen something for the first time -> deopt. |
2076 // This can only happen if we are not generic already. | 2081 // This can only happen if we are not generic already. |
2077 DeoptimizeIf(no_condition, instr->environment()); | 2082 DeoptimizeIf(no_condition, instr->environment()); |
2078 } | 2083 } |
(...skipping 1361 matching lines...)
3440 } | 3445 } |
3441 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 3446 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
3442 private: | 3447 private: |
3443 LMathAbs* instr_; | 3448 LMathAbs* instr_; |
3444 }; | 3449 }; |
3445 | 3450 |
3446 ASSERT(instr->value()->Equals(instr->result())); | 3451 ASSERT(instr->value()->Equals(instr->result())); |
3447 Representation r = instr->hydrogen()->value()->representation(); | 3452 Representation r = instr->hydrogen()->value()->representation(); |
3448 | 3453 |
3449 if (r.IsDouble()) { | 3454 if (r.IsDouble()) { |
3450 XMMRegister scratch = xmm0; | 3455 XMMRegister scratch = double_scratch0(); |
3451 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3456 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3452 __ xorps(scratch, scratch); | 3457 __ xorps(scratch, scratch); |
3453 __ subsd(scratch, input_reg); | 3458 __ subsd(scratch, input_reg); |
3454 __ andpd(input_reg, scratch); | 3459 __ andpd(input_reg, scratch); |
3455 } else if (r.IsInteger32()) { | 3460 } else if (r.IsInteger32()) { |
3456 EmitIntegerMathAbs(instr); | 3461 EmitIntegerMathAbs(instr); |
3457 } else if (r.IsSmi()) { | 3462 } else if (r.IsSmi()) { |
3458 EmitSmiMathAbs(instr); | 3463 EmitSmiMathAbs(instr); |
3459 } else { // Tagged case. | 3464 } else { // Tagged case. |
3460 DeferredMathAbsTaggedHeapNumber* deferred = | 3465 DeferredMathAbsTaggedHeapNumber* deferred = |
3461 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); | 3466 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); |
3462 Register input_reg = ToRegister(instr->value()); | 3467 Register input_reg = ToRegister(instr->value()); |
3463 // Smi check. | 3468 // Smi check. |
3464 __ JumpIfNotSmi(input_reg, deferred->entry()); | 3469 __ JumpIfNotSmi(input_reg, deferred->entry()); |
3465 EmitSmiMathAbs(instr); | 3470 EmitSmiMathAbs(instr); |
3466 __ bind(deferred->exit()); | 3471 __ bind(deferred->exit()); |
3467 } | 3472 } |
3468 } | 3473 } |
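
Note on the double path of DoMathAbs: subtracting x from +0 flips only the sign bit of any non-zero x, so AND-ing x with (0.0 - x) keeps the exponent and mantissa while clearing the sign, i.e. fabs(x); the +/-0 cases also come out as +0. A bit-level sketch of the xorps/subsd/andpd sequence, done on the host (illustrative only):

    #include <cstdint>
    #include <cstring>

    // Mirrors the sign-clearing trick above; plain C++, not V8 code.
    static double abs_via_sign_and(double x) {
      double neg = 0.0 - x;        // flips the sign bit of any non-zero x
      uint64_t a, b;
      std::memcpy(&a, &x, 8);
      std::memcpy(&b, &neg, 8);
      uint64_t r = a & b;          // the differing sign bits AND to zero
      double out;
      std::memcpy(&out, &r, 8);
      return out;                  // equals std::fabs(x)
    }
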
3469 | 3474 |
3470 | 3475 |
3471 void LCodeGen::DoMathFloor(LMathFloor* instr) { | 3476 void LCodeGen::DoMathFloor(LMathFloor* instr) { |
3472 XMMRegister xmm_scratch = xmm0; | 3477 XMMRegister xmm_scratch = double_scratch0(); |
3473 Register output_reg = ToRegister(instr->result()); | 3478 Register output_reg = ToRegister(instr->result()); |
3474 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3479 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3475 | 3480 |
3476 if (CpuFeatures::IsSupported(SSE4_1)) { | 3481 if (CpuFeatures::IsSupported(SSE4_1)) { |
3477 CpuFeatureScope scope(masm(), SSE4_1); | 3482 CpuFeatureScope scope(masm(), SSE4_1); |
3478 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3483 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
3479 // Deoptimize if minus zero. | 3484 // Deoptimize if minus zero. |
3480 __ movq(output_reg, input_reg); | 3485 __ movq(output_reg, input_reg); |
3481 __ subq(output_reg, Immediate(1)); | 3486 __ subq(output_reg, Immediate(1)); |
3482 DeoptimizeIf(overflow, instr->environment()); | 3487 DeoptimizeIf(overflow, instr->environment()); |
(...skipping 38 matching lines...)
3521 __ j(equal, &done, Label::kNear); | 3526 __ j(equal, &done, Label::kNear); |
3522 __ subl(output_reg, Immediate(1)); | 3527 __ subl(output_reg, Immediate(1)); |
3523 DeoptimizeIf(overflow, instr->environment()); | 3528 DeoptimizeIf(overflow, instr->environment()); |
3524 | 3529 |
3525 __ bind(&done); | 3530 __ bind(&done); |
3526 } | 3531 } |
3527 } | 3532 } |
3528 | 3533 |
3529 | 3534 |
3530 void LCodeGen::DoMathRound(LMathRound* instr) { | 3535 void LCodeGen::DoMathRound(LMathRound* instr) { |
3531 const XMMRegister xmm_scratch = xmm0; | 3536 const XMMRegister xmm_scratch = double_scratch0(); |
3532 Register output_reg = ToRegister(instr->result()); | 3537 Register output_reg = ToRegister(instr->result()); |
3533 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3538 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3534 static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5 | 3539 static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5 |
3535 static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5 | 3540 static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5 |
3536 | 3541 |
3537 Label done, round_to_zero, below_one_half, do_not_compensate, restore; | 3542 Label done, round_to_zero, below_one_half, do_not_compensate, restore; |
3538 __ movq(kScratchRegister, one_half, RelocInfo::NONE64); | 3543 __ movq(kScratchRegister, one_half, RelocInfo::NONE64); |
3539 __ movq(xmm_scratch, kScratchRegister); | 3544 __ movq(xmm_scratch, kScratchRegister); |
3540 __ ucomisd(xmm_scratch, input_reg); | 3545 __ ucomisd(xmm_scratch, input_reg); |
3541 __ j(above, &below_one_half); | 3546 __ j(above, &below_one_half); |
(...skipping 47 matching lines...)
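
Note on the DoMathRound constants above: 0x3FE0000000000000 is the IEEE-754 encoding of 0.5 (biased exponent 0x3FE, empty mantissa) and 0xBFE0000000000000 is -0.5; they are loaded through kScratchRegister because there is no 64-bit immediate form for XMM loads. A quick check of the decoding (illustrative):

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main() {
      uint64_t one_half_bits = 0x3FE0000000000000ull;  // exponent 0x3FE == -1
      double d;
      std::memcpy(&d, &one_half_bits, 8);
      std::printf("%g\n", d);                          // prints 0.5
      return 0;
    }
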
3589 | 3594 |
3590 | 3595 |
3591 void LCodeGen::DoMathSqrt(LMathSqrt* instr) { | 3596 void LCodeGen::DoMathSqrt(LMathSqrt* instr) { |
3592 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3597 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3593 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 3598 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
3594 __ sqrtsd(input_reg, input_reg); | 3599 __ sqrtsd(input_reg, input_reg); |
3595 } | 3600 } |
3596 | 3601 |
3597 | 3602 |
3598 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) { | 3603 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) { |
3599 XMMRegister xmm_scratch = xmm0; | 3604 XMMRegister xmm_scratch = double_scratch0(); |
3600 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3605 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
3601 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 3606 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
3602 | 3607 |
3603 // Note that according to ECMA-262 15.8.2.13: | 3608 // Note that according to ECMA-262 15.8.2.13: |
3604 // Math.pow(-Infinity, 0.5) == Infinity | 3609 // Math.pow(-Infinity, 0.5) == Infinity |
3605 // Math.sqrt(-Infinity) == NaN | 3610 // Math.sqrt(-Infinity) == NaN |
3606 Label done, sqrt; | 3611 Label done, sqrt; |
3607 // Check base for -Infinity. According to IEEE-754, double-precision | 3612 // Check base for -Infinity. According to IEEE-754, double-precision |
3608 // -Infinity has the highest 12 bits set and the lowest 52 bits cleared. | 3613 // -Infinity has the highest 12 bits set and the lowest 52 bits cleared. |
3609 __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000), RelocInfo::NONE64); | 3614 __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000), RelocInfo::NONE64); |
(...skipping 96 matching lines...)
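
Note on the DoMathPowHalf special case above: ECMA-262 requires Math.pow(-Infinity, 0.5) to be +Infinity even though sqrt(-Infinity) is NaN, so the code checks the base against the -Infinity bit pattern 0xFFF0000000000000 before taking the sqrt path. A sketch covering just that special case (the name is illustrative; other edge cases are left to sqrt):

    #include <cmath>
    #include <limits>

    // Sketch: the -Infinity special case called out in the comments above.
    static double pow_half_sketch(double base) {
      if (base == -std::numeric_limits<double>::infinity()) {
        return std::numeric_limits<double>::infinity();  // pow(-Inf, 0.5)
      }
      return std::sqrt(base);  // everything else behaves like sqrt here
    }
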
3706 // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF) | 3711 // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF) |
3707 Register random = state0; | 3712 Register random = state0; |
3708 __ shll(random, Immediate(14)); | 3713 __ shll(random, Immediate(14)); |
3709 __ andl(state1, Immediate(0x3FFFF)); | 3714 __ andl(state1, Immediate(0x3FFFF)); |
3710 __ addl(random, state1); | 3715 __ addl(random, state1); |
3711 | 3716 |
3712 // Convert 32 random bits in rax to 0.(32 random bits) in a double | 3717 // Convert 32 random bits in rax to 0.(32 random bits) in a double |
3713 // by computing: | 3718 // by computing: |
3714 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). | 3719 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). |
3715 XMMRegister result = ToDoubleRegister(instr->result()); | 3720 XMMRegister result = ToDoubleRegister(instr->result()); |
3716 // We use xmm0 as fixed scratch register here. | 3721 XMMRegister scratch4 = double_scratch0(); |
3717 XMMRegister scratch4 = xmm0; | |
3718 __ movq(scratch3, V8_INT64_C(0x4130000000000000), | 3722 __ movq(scratch3, V8_INT64_C(0x4130000000000000), |
3719 RelocInfo::NONE64); // 1.0 x 2^20 as double | 3723 RelocInfo::NONE64); // 1.0 x 2^20 as double |
3720 __ movq(scratch4, scratch3); | 3724 __ movq(scratch4, scratch3); |
3721 __ movd(result, random); | 3725 __ movd(result, random); |
3722 __ xorps(result, scratch4); | 3726 __ xorps(result, scratch4); |
3723 __ subsd(result, scratch4); | 3727 __ subsd(result, scratch4); |
3724 } | 3728 } |
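
Note on the bit trick at the end of DoRandom: 0x4130000000000000 is 2^20 as a double with an all-zero mantissa, so XOR-ing (here equivalent to OR-ing) 32 random bits into the low mantissa bits produces 2^20 + r * 2^-32, and subtracting 2^20 leaves a uniform value in [0, 1). A host-side sketch of the same construction (not V8 code):

    #include <cstdint>
    #include <cstring>

    // Sketch of the movd/xorps/subsd sequence: 32 random bits -> [0, 1).
    static double random_bits_to_double(uint32_t random) {
      const uint64_t kTwoPow20Bits = 0x4130000000000000ull;  // 2^20 as double
      uint64_t bits = kTwoPow20Bits ^ random;  // low 32 mantissa bits <- random
      double with_bias, bias;
      std::memcpy(&with_bias, &bits, 8);
      std::memcpy(&bias, &kTwoPow20Bits, 8);
      return with_bias - bias;                 // random * 2^-32
    }
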
3725 | 3729 |
3726 | 3730 |
3727 void LCodeGen::DoMathExp(LMathExp* instr) { | 3731 void LCodeGen::DoMathExp(LMathExp* instr) { |
3728 XMMRegister input = ToDoubleRegister(instr->value()); | 3732 XMMRegister input = ToDoubleRegister(instr->value()); |
3729 XMMRegister result = ToDoubleRegister(instr->result()); | 3733 XMMRegister result = ToDoubleRegister(instr->result()); |
| 3734 XMMRegister temp0 = double_scratch0(); |
3730 Register temp1 = ToRegister(instr->temp1()); | 3735 Register temp1 = ToRegister(instr->temp1()); |
3731 Register temp2 = ToRegister(instr->temp2()); | 3736 Register temp2 = ToRegister(instr->temp2()); |
3732 | 3737 |
3733 MathExpGenerator::EmitMathExp(masm(), input, result, xmm0, temp1, temp2); | 3738 MathExpGenerator::EmitMathExp(masm(), input, result, temp0, temp1, temp2); |
3734 } | 3739 } |
3735 | 3740 |
3736 | 3741 |
3737 void LCodeGen::DoMathLog(LMathLog* instr) { | 3742 void LCodeGen::DoMathLog(LMathLog* instr) { |
3738 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 3743 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
3739 TranscendentalCacheStub stub(TranscendentalCache::LOG, | 3744 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
3740 TranscendentalCacheStub::UNTAGGED); | 3745 TranscendentalCacheStub::UNTAGGED); |
3741 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3746 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
3742 } | 3747 } |
3743 | 3748 |
(...skipping 780 matching lines...)
4524 Register reg = ToRegister(instr->value()); | 4529 Register reg = ToRegister(instr->value()); |
4525 Register tmp = reg.is(rax) ? rcx : rax; | 4530 Register tmp = reg.is(rax) ? rcx : rax; |
4526 | 4531 |
4527 // Preserve the value of all registers. | 4532 // Preserve the value of all registers. |
4528 PushSafepointRegistersScope scope(this); | 4533 PushSafepointRegistersScope scope(this); |
4529 | 4534 |
4530 Label done; | 4535 Label done; |
4531 // Load value into xmm1 which will be preserved across potential call to | 4536 // Load value into xmm1 which will be preserved across potential call to |
4532 // runtime (MacroAssembler::EnterExitFrameEpilogue preserves only allocatable | 4537 // runtime (MacroAssembler::EnterExitFrameEpilogue preserves only allocatable |
4533 // XMM registers on x64). | 4538 // XMM registers on x64). |
4534 __ LoadUint32(xmm1, reg, xmm0); | 4539 XMMRegister xmm_scratch = double_scratch0(); |
| 4540 __ LoadUint32(xmm1, reg, xmm_scratch); |
4535 | 4541 |
4536 if (FLAG_inline_new) { | 4542 if (FLAG_inline_new) { |
4537 __ AllocateHeapNumber(reg, tmp, &slow); | 4543 __ AllocateHeapNumber(reg, tmp, &slow); |
4538 __ jmp(&done, Label::kNear); | 4544 __ jmp(&done, Label::kNear); |
4539 } | 4545 } |
4540 | 4546 |
4541 // Slow case: Call the runtime system to do the number allocation. | 4547 // Slow case: Call the runtime system to do the number allocation. |
4542 __ bind(&slow); | 4548 __ bind(&slow); |
4543 | 4549 |
4544 // Put a valid pointer value in the stack slot where the result | 4550 // Put a valid pointer value in the stack slot where the result |
(...skipping 98 matching lines...)
4643 // check, since all heap objects are at least two words long. | 4649 // check, since all heap objects are at least two words long. |
4644 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4650 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
4645 | 4651 |
4646 if (can_convert_undefined_to_nan) { | 4652 if (can_convert_undefined_to_nan) { |
4647 __ j(not_equal, &convert); | 4653 __ j(not_equal, &convert); |
4648 } else { | 4654 } else { |
4649 DeoptimizeIf(not_equal, env); | 4655 DeoptimizeIf(not_equal, env); |
4650 } | 4656 } |
4651 | 4657 |
4652 if (deoptimize_on_minus_zero) { | 4658 if (deoptimize_on_minus_zero) { |
4653 XMMRegister xmm_scratch = xmm0; | 4659 XMMRegister xmm_scratch = double_scratch0(); |
4654 __ xorps(xmm_scratch, xmm_scratch); | 4660 __ xorps(xmm_scratch, xmm_scratch); |
4655 __ ucomisd(xmm_scratch, result_reg); | 4661 __ ucomisd(xmm_scratch, result_reg); |
4656 __ j(not_equal, &done, Label::kNear); | 4662 __ j(not_equal, &done, Label::kNear); |
4657 __ movmskpd(kScratchRegister, result_reg); | 4663 __ movmskpd(kScratchRegister, result_reg); |
4658 __ testq(kScratchRegister, Immediate(1)); | 4664 __ testq(kScratchRegister, Immediate(1)); |
4659 DeoptimizeIf(not_zero, env); | 4665 DeoptimizeIf(not_zero, env); |
4660 } | 4666 } |
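
Note on the deoptimize_on_minus_zero check: ucomisd cannot distinguish -0 from +0 (they compare equal), so once the value is known to be a zero, movmskpd copies the sign bit into a GP register and bit 0 identifies -0. The scalar equivalent, as a sketch:

    #include <cmath>

    // Sketch of the ucomisd + movmskpd pair: detect -0 exactly.
    static bool is_minus_zero(double value) {
      return value == 0.0 && std::signbit(value);
    }
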
4661 __ jmp(&done, Label::kNear); | 4667 __ jmp(&done, Label::kNear); |
4662 | 4668 |
4663 if (can_convert_undefined_to_nan) { | 4669 if (can_convert_undefined_to_nan) { |
(...skipping 107 matching lines...)
4771 LOperand* result = instr->result(); | 4777 LOperand* result = instr->result(); |
4772 ASSERT(result->IsRegister()); | 4778 ASSERT(result->IsRegister()); |
4773 | 4779 |
4774 XMMRegister input_reg = ToDoubleRegister(input); | 4780 XMMRegister input_reg = ToDoubleRegister(input); |
4775 Register result_reg = ToRegister(result); | 4781 Register result_reg = ToRegister(result); |
4776 | 4782 |
4777 if (instr->truncating()) { | 4783 if (instr->truncating()) { |
4778 __ TruncateDoubleToI(result_reg, input_reg); | 4784 __ TruncateDoubleToI(result_reg, input_reg); |
4779 } else { | 4785 } else { |
4780 Label bailout, done; | 4786 Label bailout, done; |
4781 __ DoubleToI(result_reg, input_reg, xmm0, | 4787 XMMRegister xmm_scratch = double_scratch0(); |
| 4788 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
4782 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); | 4789 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); |
4783 | 4790 |
4784 __ jmp(&done, Label::kNear); | 4791 __ jmp(&done, Label::kNear); |
4785 __ bind(&bailout); | 4792 __ bind(&bailout); |
4786 DeoptimizeIf(no_condition, instr->environment()); | 4793 DeoptimizeIf(no_condition, instr->environment()); |
4787 __ bind(&done); | 4794 __ bind(&done); |
4788 } | 4795 } |
4789 } | 4796 } |
4790 | 4797 |
4791 | 4798 |
4792 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 4799 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
4793 LOperand* input = instr->value(); | 4800 LOperand* input = instr->value(); |
4794 ASSERT(input->IsDoubleRegister()); | 4801 ASSERT(input->IsDoubleRegister()); |
4795 LOperand* result = instr->result(); | 4802 LOperand* result = instr->result(); |
4796 ASSERT(result->IsRegister()); | 4803 ASSERT(result->IsRegister()); |
4797 | 4804 |
4798 XMMRegister input_reg = ToDoubleRegister(input); | 4805 XMMRegister input_reg = ToDoubleRegister(input); |
4799 Register result_reg = ToRegister(result); | 4806 Register result_reg = ToRegister(result); |
4800 | 4807 |
4801 Label bailout, done; | 4808 Label bailout, done; |
4802 __ DoubleToI(result_reg, input_reg, xmm0, | 4809 XMMRegister xmm_scratch = double_scratch0(); |
| 4810 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
4803 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); | 4811 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); |
4804 | 4812 |
4805 __ jmp(&done, Label::kNear); | 4813 __ jmp(&done, Label::kNear); |
4806 __ bind(&bailout); | 4814 __ bind(&bailout); |
4807 DeoptimizeIf(no_condition, instr->environment()); | 4815 DeoptimizeIf(no_condition, instr->environment()); |
4808 __ bind(&done); | 4816 __ bind(&done); |
4809 | 4817 |
4810 __ Integer32ToSmi(result_reg, result_reg); | 4818 __ Integer32ToSmi(result_reg, result_reg); |
4811 DeoptimizeIf(overflow, instr->environment()); | 4819 DeoptimizeIf(overflow, instr->environment()); |
4812 } | 4820 } |
(...skipping 125 matching lines...)
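
Note on the non-truncating DoubleToI uses above (DoDoubleToI and DoDoubleToSmi): the conversion must bail out to a deopt when the double is not exactly an int32 and, depending on the minus-zero mode, when it is -0. A host-side sketch of that contract (illustrative only; the real helper is a macro-assembler routine and its exact MinusZeroMode handling is assumed here):

    #include <cmath>
    #include <cstdint>

    // Illustrative: exact double -> int32 conversion with a bailout result.
    static bool double_to_int32_exact(double input, bool bailout_on_minus_zero,
                                      int32_t* out) {
      double truncated = std::trunc(input);
      if (truncated != input) return false;                 // NaN or fractional
      if (truncated < INT32_MIN || truncated > INT32_MAX) return false;
      if (bailout_on_minus_zero && input == 0.0 && std::signbit(input)) {
        return false;                                       // -0 must bail out
      }
      *out = static_cast<int32_t>(truncated);
      return true;
    }
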
4938 } else { | 4946 } else { |
4939 DeoptimizeIf(not_equal, instr->environment()); | 4947 DeoptimizeIf(not_equal, instr->environment()); |
4940 } | 4948 } |
4941 | 4949 |
4942 __ bind(&success); | 4950 __ bind(&success); |
4943 } | 4951 } |
4944 | 4952 |
4945 | 4953 |
4946 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 4954 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
4947 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); | 4955 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); |
| 4956 XMMRegister xmm_scratch = double_scratch0(); |
4948 Register result_reg = ToRegister(instr->result()); | 4957 Register result_reg = ToRegister(instr->result()); |
4949 __ ClampDoubleToUint8(value_reg, xmm0, result_reg); | 4958 __ ClampDoubleToUint8(value_reg, xmm_scratch, result_reg); |
4950 } | 4959 } |
4951 | 4960 |
4952 | 4961 |
4953 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { | 4962 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { |
4954 ASSERT(instr->unclamped()->Equals(instr->result())); | 4963 ASSERT(instr->unclamped()->Equals(instr->result())); |
4955 Register value_reg = ToRegister(instr->result()); | 4964 Register value_reg = ToRegister(instr->result()); |
4956 __ ClampUint8(value_reg); | 4965 __ ClampUint8(value_reg); |
4957 } | 4966 } |
4958 | 4967 |
4959 | 4968 |
4960 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) { | 4969 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) { |
4961 ASSERT(instr->unclamped()->Equals(instr->result())); | 4970 ASSERT(instr->unclamped()->Equals(instr->result())); |
4962 Register input_reg = ToRegister(instr->unclamped()); | 4971 Register input_reg = ToRegister(instr->unclamped()); |
4963 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm()); | 4972 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm()); |
| 4973 XMMRegister xmm_scratch = double_scratch0(); |
4964 Label is_smi, done, heap_number; | 4974 Label is_smi, done, heap_number; |
4965 | 4975 |
4966 __ JumpIfSmi(input_reg, &is_smi); | 4976 __ JumpIfSmi(input_reg, &is_smi); |
4967 | 4977 |
4968 // Check for heap number | 4978 // Check for heap number |
4969 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 4979 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
4970 factory()->heap_number_map()); | 4980 factory()->heap_number_map()); |
4971 __ j(equal, &heap_number, Label::kNear); | 4981 __ j(equal, &heap_number, Label::kNear); |
4972 | 4982 |
4973 // Check for undefined. Undefined is converted to zero for clamping | 4983 // Check for undefined. Undefined is converted to zero for clamping |
4974 // conversions. | 4984 // conversions. |
4975 __ Cmp(input_reg, factory()->undefined_value()); | 4985 __ Cmp(input_reg, factory()->undefined_value()); |
4976 DeoptimizeIf(not_equal, instr->environment()); | 4986 DeoptimizeIf(not_equal, instr->environment()); |
4977 __ movq(input_reg, Immediate(0)); | 4987 __ movq(input_reg, Immediate(0)); |
4978 __ jmp(&done, Label::kNear); | 4988 __ jmp(&done, Label::kNear); |
4979 | 4989 |
4980 // Heap number | 4990 // Heap number |
4981 __ bind(&heap_number); | 4991 __ bind(&heap_number); |
4982 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4992 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
4983 __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg); | 4993 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); |
4984 __ jmp(&done, Label::kNear); | 4994 __ jmp(&done, Label::kNear); |
4985 | 4995 |
4986 // smi | 4996 // smi |
4987 __ bind(&is_smi); | 4997 __ bind(&is_smi); |
4988 __ SmiToInteger32(input_reg, input_reg); | 4998 __ SmiToInteger32(input_reg, input_reg); |
4989 __ ClampUint8(input_reg); | 4999 __ ClampUint8(input_reg); |
4990 | 5000 |
4991 __ bind(&done); | 5001 __ bind(&done); |
4992 } | 5002 } |
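
Note on the clamp family above: the tagged path converts undefined to 0, smis go through ClampUint8, and heap numbers go through ClampDoubleToUint8, which is assumed here to saturate to [0, 255] and map NaN to 0. A scalar sketch of that saturation (the rounding of in-range values is assumed to be round-to-nearest-even, as an SSE convert would do; not the V8 implementation):

    #include <cmath>
    #include <cstdint>

    // Sketch of clamp-to-uint8: NaN and negatives -> 0, large values -> 255.
    static uint8_t clamp_double_to_uint8(double value) {
      if (!(value > 0.0)) return 0;        // NaN, -0, +0 and negatives
      if (value >= 255.0) return 255;
      return static_cast<uint8_t>(std::nearbyint(value));   // nearest-even
    }
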
4993 | 5003 |
(...skipping 512 matching lines...)
5506 FixedArray::kHeaderSize - kPointerSize)); | 5516 FixedArray::kHeaderSize - kPointerSize)); |
5507 __ bind(&done); | 5517 __ bind(&done); |
5508 } | 5518 } |
5509 | 5519 |
5510 | 5520 |
5511 #undef __ | 5521 #undef __ |
5512 | 5522 |
5513 } } // namespace v8::internal | 5523 } } // namespace v8::internal |
5514 | 5524 |
5515 #endif // V8_TARGET_ARCH_X64 | 5525 #endif // V8_TARGET_ARCH_X64 |