| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1814 matching lines...) |
| 1825 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; | 1825 Condition condition = (operation == HMathMinMax::kMathMin) ? below : above; |
| 1826 XMMRegister left_reg = ToDoubleRegister(left); | 1826 XMMRegister left_reg = ToDoubleRegister(left); |
| 1827 XMMRegister right_reg = ToDoubleRegister(right); | 1827 XMMRegister right_reg = ToDoubleRegister(right); |
| 1828 __ ucomisd(left_reg, right_reg); | 1828 __ ucomisd(left_reg, right_reg); |
| 1829 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN. | 1829 __ j(parity_even, &check_nan_left, Label::kNear); // At least one NaN. |
| 1830 __ j(equal, &check_zero, Label::kNear); // left == right. | 1830 __ j(equal, &check_zero, Label::kNear); // left == right. |
| 1831 __ j(condition, &return_left, Label::kNear); | 1831 __ j(condition, &return_left, Label::kNear); |
| 1832 __ jmp(&return_right, Label::kNear); | 1832 __ jmp(&return_right, Label::kNear); |
| 1833 | 1833 |
| 1834 __ bind(&check_zero); | 1834 __ bind(&check_zero); |
| 1835 XMMRegister xmm_scratch = xmm0; | 1835 XMMRegister xmm_scratch = double_scratch0(); |
| 1836 __ xorps(xmm_scratch, xmm_scratch); | 1836 __ xorps(xmm_scratch, xmm_scratch); |
| 1837 __ ucomisd(left_reg, xmm_scratch); | 1837 __ ucomisd(left_reg, xmm_scratch); |
| 1838 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. | 1838 __ j(not_equal, &return_left, Label::kNear); // left == right != 0. |
| 1839 // At this point, both left and right are either 0 or -0. | 1839 // At this point, both left and right are either 0 or -0. |
| 1840 if (operation == HMathMinMax::kMathMin) { | 1840 if (operation == HMathMinMax::kMathMin) { |
| 1841 __ orpd(left_reg, right_reg); | 1841 __ orpd(left_reg, right_reg); |
| 1842 } else { | 1842 } else { |
| 1843 // Since we operate on +0 and/or -0, addsd and andsd have the same effect. | 1843 // Since we operate on +0 and/or -0, addsd and andsd have the same effect. |
| 1844 __ addsd(left_reg, right_reg); | 1844 __ addsd(left_reg, right_reg); |
| 1845 } | 1845 } |
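For reference, the ±0 handling above works because, apart from the sign bit, +0.0 and -0.0 have identical bit patterns: for min, OR-ing the operands sets the sign bit whenever either input is -0, and for max, adding them yields +0 unless both inputs are -0, which is also why the comment notes that addition and the bitwise AND coincide here. A minimal standalone sketch of the same idea in plain C++ (illustration only, not V8 code; both helpers assume their inputs are signed zeros, as at the check_zero label above):

#include <cstdint>
#include <cstring>

static uint64_t ToBits(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  return bits;
}
static double FromBits(uint64_t bits) {
  double d;
  std::memcpy(&d, &bits, sizeof d);
  return d;
}

double ZeroMin(double a, double b) {      // a, b are +0.0 or -0.0
  return FromBits(ToBits(a) | ToBits(b)); // sign bit set iff either operand is -0
}

double ZeroMax(double a, double b) {      // a, b are +0.0 or -0.0
  return a + b;                           // +0 unless both operands are -0
}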
| (...skipping 25 matching lines...) |
| 1871 break; | 1871 break; |
| 1872 case Token::MUL: | 1872 case Token::MUL: |
| 1873 __ mulsd(left, right); | 1873 __ mulsd(left, right); |
| 1874 break; | 1874 break; |
| 1875 case Token::DIV: | 1875 case Token::DIV: |
| 1876 __ divsd(left, right); | 1876 __ divsd(left, right); |
| 1877 // Don't delete this mov. It may improve performance on some CPUs, | 1877 // Don't delete this mov. It may improve performance on some CPUs, |
| 1878 // when there is a mulsd depending on the result | 1878 // when there is a mulsd depending on the result |
| 1879 __ movaps(left, left); | 1879 __ movaps(left, left); |
| 1880 break; | 1880 break; |
| 1881 case Token::MOD: | 1881 case Token::MOD: { |
| 1882 XMMRegister xmm_scratch = double_scratch0(); |
| 1882 __ PrepareCallCFunction(2); | 1883 __ PrepareCallCFunction(2); |
| 1883 __ movaps(xmm0, left); | 1884 __ movaps(xmm_scratch, left); |
| 1884 ASSERT(right.is(xmm1)); | 1885 ASSERT(right.is(xmm1)); |
| 1885 __ CallCFunction( | 1886 __ CallCFunction( |
| 1886 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); | 1887 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2); |
| 1887 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); | 1888 __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); |
| 1888 __ movaps(result, xmm0); | 1889 __ movaps(result, xmm_scratch); |
| 1889 break; | 1890 break; |
| 1891 } |
| 1890 default: | 1892 default: |
| 1891 UNREACHABLE(); | 1893 UNREACHABLE(); |
| 1892 break; | 1894 break; |
| 1893 } | 1895 } |
| 1894 } | 1896 } |
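A note on the Token::MOD case above: it hands the operands to a C helper, and on x64 the first two double arguments of a C function are passed in xmm0 and xmm1 (in both the System V and Windows calling conventions), which is why the right operand is asserted to already be in xmm1 and the left operand is copied into the scratch register before CallCFunction. The helper reached through ExternalReference::double_fp_operation(Token::MOD, isolate()) behaves like fmod; the name and signature below are an assumption made purely for illustration, not taken from this file:

#include <cmath>

// Hypothetical stand-in for the runtime helper invoked for Token::MOD.
double DoubleMod(double dividend, double divisor) {
  // fmod keeps the sign of the dividend, matching JavaScript's % operator
  // for finite operands.
  return std::fmod(dividend, divisor);
}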
| 1895 | 1897 |
| 1896 | 1898 |
| 1897 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { | 1899 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { |
| 1898 ASSERT(ToRegister(instr->left()).is(rdx)); | 1900 ASSERT(ToRegister(instr->left()).is(rdx)); |
| 1899 ASSERT(ToRegister(instr->right()).is(rax)); | 1901 ASSERT(ToRegister(instr->right()).is(rax)); |
| (...skipping 40 matching lines...) |
| 1940 int false_block = instr->FalseDestination(chunk_); | 1942 int false_block = instr->FalseDestination(chunk_); |
| 1941 __ j(cc, chunk_->GetAssemblyLabel(false_block)); | 1943 __ j(cc, chunk_->GetAssemblyLabel(false_block)); |
| 1942 } | 1944 } |
| 1943 | 1945 |
| 1944 | 1946 |
| 1945 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { | 1947 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { |
| 1946 __ int3(); | 1948 __ int3(); |
| 1947 } | 1949 } |
| 1948 | 1950 |
| 1949 | 1951 |
| 1950 void LCodeGen::DoIsNumberAndBranch(LIsNumberAndBranch* instr) { | |
| 1951 Representation r = instr->hydrogen()->value()->representation(); | |
| 1952 if (r.IsSmiOrInteger32() || r.IsDouble()) { | |
| 1953 EmitBranch(instr, no_condition); | |
| 1954 } else { | |
| 1955 ASSERT(r.IsTagged()); | |
| 1956 Register reg = ToRegister(instr->value()); | |
| 1957 HType type = instr->hydrogen()->value()->type(); | |
| 1958 if (type.IsTaggedNumber()) { | |
| 1959 EmitBranch(instr, no_condition); | |
| 1960 } | |
| 1961 __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); | |
| 1962 __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset), | |
| 1963 Heap::kHeapNumberMapRootIndex); | |
| 1964 EmitBranch(instr, equal); | |
| 1965 } | |
| 1966 } | |
| 1967 | |
| 1968 | |
| 1969 void LCodeGen::DoBranch(LBranch* instr) { | 1952 void LCodeGen::DoBranch(LBranch* instr) { |
| 1970 Representation r = instr->hydrogen()->value()->representation(); | 1953 Representation r = instr->hydrogen()->value()->representation(); |
| 1971 if (r.IsInteger32()) { | 1954 if (r.IsInteger32()) { |
| 1972 ASSERT(!info()->IsStub()); | 1955 ASSERT(!info()->IsStub()); |
| 1973 Register reg = ToRegister(instr->value()); | 1956 Register reg = ToRegister(instr->value()); |
| 1974 __ testl(reg, reg); | 1957 __ testl(reg, reg); |
| 1975 EmitBranch(instr, not_zero); | 1958 EmitBranch(instr, not_zero); |
| 1976 } else if (r.IsSmi()) { | 1959 } else if (r.IsSmi()) { |
| 1977 ASSERT(!info()->IsStub()); | 1960 ASSERT(!info()->IsStub()); |
| 1978 Register reg = ToRegister(instr->value()); | 1961 Register reg = ToRegister(instr->value()); |
| 1979 __ testq(reg, reg); | 1962 __ testq(reg, reg); |
| 1980 EmitBranch(instr, not_zero); | 1963 EmitBranch(instr, not_zero); |
| 1981 } else if (r.IsDouble()) { | 1964 } else if (r.IsDouble()) { |
| 1982 ASSERT(!info()->IsStub()); | 1965 ASSERT(!info()->IsStub()); |
| 1983 XMMRegister reg = ToDoubleRegister(instr->value()); | 1966 XMMRegister reg = ToDoubleRegister(instr->value()); |
| 1984 __ xorps(xmm0, xmm0); | 1967 XMMRegister xmm_scratch = double_scratch0(); |
| 1985 __ ucomisd(reg, xmm0); | 1968 __ xorps(xmm_scratch, xmm_scratch); |
| 1969 __ ucomisd(reg, xmm_scratch); |
| 1986 EmitBranch(instr, not_equal); | 1970 EmitBranch(instr, not_equal); |
| 1987 } else { | 1971 } else { |
| 1988 ASSERT(r.IsTagged()); | 1972 ASSERT(r.IsTagged()); |
| 1989 Register reg = ToRegister(instr->value()); | 1973 Register reg = ToRegister(instr->value()); |
| 1990 HType type = instr->hydrogen()->value()->type(); | 1974 HType type = instr->hydrogen()->value()->type(); |
| 1991 if (type.IsBoolean()) { | 1975 if (type.IsBoolean()) { |
| 1992 ASSERT(!info()->IsStub()); | 1976 ASSERT(!info()->IsStub()); |
| 1993 __ CompareRoot(reg, Heap::kTrueValueRootIndex); | 1977 __ CompareRoot(reg, Heap::kTrueValueRootIndex); |
| 1994 EmitBranch(instr, equal); | 1978 EmitBranch(instr, equal); |
| 1995 } else if (type.IsSmi()) { | 1979 } else if (type.IsSmi()) { |
| 1996 ASSERT(!info()->IsStub()); | 1980 ASSERT(!info()->IsStub()); |
| 1997 __ SmiCompare(reg, Smi::FromInt(0)); | 1981 __ SmiCompare(reg, Smi::FromInt(0)); |
| 1998 EmitBranch(instr, not_equal); | 1982 EmitBranch(instr, not_equal); |
| 1999 } else if (type.IsJSArray()) { | 1983 } else if (type.IsJSArray()) { |
| 2000 ASSERT(!info()->IsStub()); | 1984 ASSERT(!info()->IsStub()); |
| 2001 EmitBranch(instr, no_condition); | 1985 EmitBranch(instr, no_condition); |
| 2002 } else if (type.IsHeapNumber()) { | 1986 } else if (type.IsHeapNumber()) { |
| 2003 ASSERT(!info()->IsStub()); | 1987 ASSERT(!info()->IsStub()); |
| 2004 __ xorps(xmm0, xmm0); | 1988 XMMRegister xmm_scratch = double_scratch0(); |
| 2005 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); | 1989 __ xorps(xmm_scratch, xmm_scratch); |
| 1990 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); |
| 2006 EmitBranch(instr, not_equal); | 1991 EmitBranch(instr, not_equal); |
| 2007 } else if (type.IsString()) { | 1992 } else if (type.IsString()) { |
| 2008 ASSERT(!info()->IsStub()); | 1993 ASSERT(!info()->IsStub()); |
| 2009 __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0)); | 1994 __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0)); |
| 2010 EmitBranch(instr, not_equal); | 1995 EmitBranch(instr, not_equal); |
| 2011 } else { | 1996 } else { |
| 2012 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); | 1997 ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); |
| 2013 // Avoid deopts in the case where we've never executed this path before. | 1998 // Avoid deopts in the case where we've never executed this path before. |
| 2014 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); | 1999 if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); |
| 2015 | 2000 |
| (...skipping 60 matching lines...) |
| 2076 // Symbol value -> true. | 2061 // Symbol value -> true. |
| 2077 __ CmpInstanceType(map, SYMBOL_TYPE); | 2062 __ CmpInstanceType(map, SYMBOL_TYPE); |
| 2078 __ j(equal, instr->TrueLabel(chunk_)); | 2063 __ j(equal, instr->TrueLabel(chunk_)); |
| 2079 } | 2064 } |
| 2080 | 2065 |
| 2081 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { | 2066 if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) { |
| 2082 // heap number -> false iff +0, -0, or NaN. | 2067 // heap number -> false iff +0, -0, or NaN. |
| 2083 Label not_heap_number; | 2068 Label not_heap_number; |
| 2084 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); | 2069 __ CompareRoot(map, Heap::kHeapNumberMapRootIndex); |
| 2085 __ j(not_equal, &not_heap_number, Label::kNear); | 2070 __ j(not_equal, &not_heap_number, Label::kNear); |
| 2086 __ xorps(xmm0, xmm0); | 2071 XMMRegister xmm_scratch = double_scratch0(); |
| 2087 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); | 2072 __ xorps(xmm_scratch, xmm_scratch); |
| 2073 __ ucomisd(xmm_scratch, FieldOperand(reg, HeapNumber::kValueOffset)); |
| 2088 __ j(zero, instr->FalseLabel(chunk_)); | 2074 __ j(zero, instr->FalseLabel(chunk_)); |
| 2089 __ jmp(instr->TrueLabel(chunk_)); | 2075 __ jmp(instr->TrueLabel(chunk_)); |
| 2090 __ bind(&not_heap_number); | 2076 __ bind(&not_heap_number); |
| 2091 } | 2077 } |
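The HEAP_NUMBER branch above encodes the ECMAScript ToBoolean rule for numbers: a number is falsy exactly when it is +0, -0, or NaN. A single jump on the zero flag covers all three cases because ucomisd sets ZF both when the operands compare equal and when the comparison is unordered (NaN). The same rule written out in plain C++, for reference:

#include <cmath>

// ToBoolean for a double value: false iff +0, -0, or NaN.
bool NumberToBoolean(double value) {
  return value != 0.0 && !std::isnan(value);
}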
| 2092 | 2078 |
| 2093 if (!expected.IsGeneric()) { | 2079 if (!expected.IsGeneric()) { |
| 2094 // We've seen something for the first time -> deopt. | 2080 // We've seen something for the first time -> deopt. |
| 2095 // This can only happen if we are not generic already. | 2081 // This can only happen if we are not generic already. |
| 2096 DeoptimizeIf(no_condition, instr->environment()); | 2082 DeoptimizeIf(no_condition, instr->environment()); |
| 2097 } | 2083 } |
| (...skipping 783 matching lines...) |
| 2881 // Non-instance prototype: Fetch prototype from constructor field | 2867 // Non-instance prototype: Fetch prototype from constructor field |
| 2882 // in the function's map. | 2868 // in the function's map. |
| 2883 __ bind(&non_instance); | 2869 __ bind(&non_instance); |
| 2884 __ movq(result, FieldOperand(result, Map::kConstructorOffset)); | 2870 __ movq(result, FieldOperand(result, Map::kConstructorOffset)); |
| 2885 | 2871 |
| 2886 // All done. | 2872 // All done. |
| 2887 __ bind(&done); | 2873 __ bind(&done); |
| 2888 } | 2874 } |
| 2889 | 2875 |
| 2890 | 2876 |
| 2877 void LCodeGen::DoLoadRoot(LLoadRoot* instr) { |
| 2878 Register result = ToRegister(instr->result()); |
| 2879 __ LoadRoot(result, instr->index()); |
| 2880 } |
| 2881 |
| 2882 |
| 2891 void LCodeGen::DoLoadExternalArrayPointer( | 2883 void LCodeGen::DoLoadExternalArrayPointer( |
| 2892 LLoadExternalArrayPointer* instr) { | 2884 LLoadExternalArrayPointer* instr) { |
| 2893 Register result = ToRegister(instr->result()); | 2885 Register result = ToRegister(instr->result()); |
| 2894 Register input = ToRegister(instr->object()); | 2886 Register input = ToRegister(instr->object()); |
| 2895 __ movq(result, FieldOperand(input, | 2887 __ movq(result, FieldOperand(input, |
| 2896 ExternalPixelArray::kExternalPointerOffset)); | 2888 ExternalPixelArray::kExternalPointerOffset)); |
| 2897 } | 2889 } |
| 2898 | 2890 |
| 2899 | 2891 |
| 2900 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { | 2892 void LCodeGen::DoAccessArgumentsAt(LAccessArgumentsAt* instr) { |
| (...skipping 552 matching lines...) |
| 3453 } | 3445 } |
| 3454 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } | 3446 virtual LInstruction* instr() V8_OVERRIDE { return instr_; } |
| 3455 private: | 3447 private: |
| 3456 LMathAbs* instr_; | 3448 LMathAbs* instr_; |
| 3457 }; | 3449 }; |
| 3458 | 3450 |
| 3459 ASSERT(instr->value()->Equals(instr->result())); | 3451 ASSERT(instr->value()->Equals(instr->result())); |
| 3460 Representation r = instr->hydrogen()->value()->representation(); | 3452 Representation r = instr->hydrogen()->value()->representation(); |
| 3461 | 3453 |
| 3462 if (r.IsDouble()) { | 3454 if (r.IsDouble()) { |
| 3463 XMMRegister scratch = xmm0; | 3455 XMMRegister scratch = double_scratch0(); |
| 3464 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3456 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3465 __ xorps(scratch, scratch); | 3457 __ xorps(scratch, scratch); |
| 3466 __ subsd(scratch, input_reg); | 3458 __ subsd(scratch, input_reg); |
| 3467 __ andpd(input_reg, scratch); | 3459 __ andpd(input_reg, scratch); |
| 3468 } else if (r.IsInteger32()) { | 3460 } else if (r.IsInteger32()) { |
| 3469 EmitIntegerMathAbs(instr); | 3461 EmitIntegerMathAbs(instr); |
| 3470 } else if (r.IsSmi()) { | 3462 } else if (r.IsSmi()) { |
| 3471 EmitSmiMathAbs(instr); | 3463 EmitSmiMathAbs(instr); |
| 3472 } else { // Tagged case. | 3464 } else { // Tagged case. |
| 3473 DeferredMathAbsTaggedHeapNumber* deferred = | 3465 DeferredMathAbsTaggedHeapNumber* deferred = |
| 3474 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); | 3466 new(zone()) DeferredMathAbsTaggedHeapNumber(this, instr); |
| 3475 Register input_reg = ToRegister(instr->value()); | 3467 Register input_reg = ToRegister(instr->value()); |
| 3476 // Smi check. | 3468 // Smi check. |
| 3477 __ JumpIfNotSmi(input_reg, deferred->entry()); | 3469 __ JumpIfNotSmi(input_reg, deferred->entry()); |
| 3478 EmitSmiMathAbs(instr); | 3470 EmitSmiMathAbs(instr); |
| 3479 __ bind(deferred->exit()); | 3471 __ bind(deferred->exit()); |
| 3480 } | 3472 } |
| 3481 } | 3473 } |
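In the double path of DoMathAbs above, the xorps/subsd/andpd sequence computes scratch = 0 - x and then ANDs x with scratch: for any x, the bit patterns of x and -x differ only in the sign bit, so the AND clears the sign and leaves |x| (the ±0 cases also come out as +0). A standalone sketch of that identity, not V8 code:

#include <cstdint>
#include <cstring>

// abs(x) via the sign-bit trick mirrored from the sequence above.
double BitwiseAbs(double x) {
  double negated = 0.0 - x;  // for x = ±0 this yields +0, which still works below
  uint64_t xbits, nbits;
  std::memcpy(&xbits, &x, sizeof xbits);
  std::memcpy(&nbits, &negated, sizeof nbits);
  uint64_t rbits = xbits & nbits;  // exponent/mantissa kept, sign bit cleared
  double result;
  std::memcpy(&result, &rbits, sizeof result);
  return result;
}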
| 3482 | 3474 |
| 3483 | 3475 |
| 3484 void LCodeGen::DoMathFloor(LMathFloor* instr) { | 3476 void LCodeGen::DoMathFloor(LMathFloor* instr) { |
| 3485 XMMRegister xmm_scratch = xmm0; | 3477 XMMRegister xmm_scratch = double_scratch0(); |
| 3486 Register output_reg = ToRegister(instr->result()); | 3478 Register output_reg = ToRegister(instr->result()); |
| 3487 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3479 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3488 | 3480 |
| 3489 if (CpuFeatures::IsSupported(SSE4_1)) { | 3481 if (CpuFeatures::IsSupported(SSE4_1)) { |
| 3490 CpuFeatureScope scope(masm(), SSE4_1); | 3482 CpuFeatureScope scope(masm(), SSE4_1); |
| 3491 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 3483 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 3492 // Deoptimize if minus zero. | 3484 // Deoptimize if minus zero. |
| 3493 __ movq(output_reg, input_reg); | 3485 __ movq(output_reg, input_reg); |
| 3494 __ subq(output_reg, Immediate(1)); | 3486 __ subq(output_reg, Immediate(1)); |
| 3495 DeoptimizeIf(overflow, instr->environment()); | 3487 DeoptimizeIf(overflow, instr->environment()); |
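The "deoptimize if minus zero" idiom above moves the raw bits of the double into a general-purpose register and subtracts 1: the only operand for which that subtraction sets the overflow flag is 0x8000000000000000 (INT64_MIN), which is exactly the bit pattern of -0.0. An equivalent check in plain C++, for illustration only:

#include <cstdint>
#include <cstring>

// What the movq/subq/overflow sequence above detects.
bool IsMinusZero(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);
  return bits == UINT64_C(0x8000000000000000);  // raw bits of -0.0 == INT64_MIN
}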
| (...skipping 38 matching lines...) |
| 3534 __ j(equal, &done, Label::kNear); | 3526 __ j(equal, &done, Label::kNear); |
| 3535 __ subl(output_reg, Immediate(1)); | 3527 __ subl(output_reg, Immediate(1)); |
| 3536 DeoptimizeIf(overflow, instr->environment()); | 3528 DeoptimizeIf(overflow, instr->environment()); |
| 3537 | 3529 |
| 3538 __ bind(&done); | 3530 __ bind(&done); |
| 3539 } | 3531 } |
| 3540 } | 3532 } |
| 3541 | 3533 |
| 3542 | 3534 |
| 3543 void LCodeGen::DoMathRound(LMathRound* instr) { | 3535 void LCodeGen::DoMathRound(LMathRound* instr) { |
| 3544 const XMMRegister xmm_scratch = xmm0; | 3536 const XMMRegister xmm_scratch = double_scratch0(); |
| 3545 Register output_reg = ToRegister(instr->result()); | 3537 Register output_reg = ToRegister(instr->result()); |
| 3546 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3538 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3547 static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5 | 3539 static int64_t one_half = V8_INT64_C(0x3FE0000000000000); // 0.5 |
| 3548 static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5 | 3540 static int64_t minus_one_half = V8_INT64_C(0xBFE0000000000000); // -0.5 |
| 3549 | 3541 |
| 3550 Label done, round_to_zero, below_one_half, do_not_compensate, restore; | 3542 Label done, round_to_zero, below_one_half, do_not_compensate, restore; |
| 3551 __ movq(kScratchRegister, one_half, RelocInfo::NONE64); | 3543 __ movq(kScratchRegister, one_half, RelocInfo::NONE64); |
| 3552 __ movq(xmm_scratch, kScratchRegister); | 3544 __ movq(xmm_scratch, kScratchRegister); |
| 3553 __ ucomisd(xmm_scratch, input_reg); | 3545 __ ucomisd(xmm_scratch, input_reg); |
| 3554 __ j(above, &below_one_half); | 3546 __ j(above, &below_one_half); |
| (...skipping 47 matching lines...) |
| 3602 | 3594 |
| 3603 | 3595 |
| 3604 void LCodeGen::DoMathSqrt(LMathSqrt* instr) { | 3596 void LCodeGen::DoMathSqrt(LMathSqrt* instr) { |
| 3605 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3597 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3606 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 3598 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
| 3607 __ sqrtsd(input_reg, input_reg); | 3599 __ sqrtsd(input_reg, input_reg); |
| 3608 } | 3600 } |
| 3609 | 3601 |
| 3610 | 3602 |
| 3611 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) { | 3603 void LCodeGen::DoMathPowHalf(LMathPowHalf* instr) { |
| 3612 XMMRegister xmm_scratch = xmm0; | 3604 XMMRegister xmm_scratch = double_scratch0(); |
| 3613 XMMRegister input_reg = ToDoubleRegister(instr->value()); | 3605 XMMRegister input_reg = ToDoubleRegister(instr->value()); |
| 3614 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); | 3606 ASSERT(ToDoubleRegister(instr->result()).is(input_reg)); |
| 3615 | 3607 |
| 3616 // Note that according to ECMA-262 15.8.2.13: | 3608 // Note that according to ECMA-262 15.8.2.13: |
| 3617 // Math.pow(-Infinity, 0.5) == Infinity | 3609 // Math.pow(-Infinity, 0.5) == Infinity |
| 3618 // Math.sqrt(-Infinity) == NaN | 3610 // Math.sqrt(-Infinity) == NaN |
| 3619 Label done, sqrt; | 3611 Label done, sqrt; |
| 3620 // Check base for -Infinity. According to IEEE-754, double-precision | 3612 // Check base for -Infinity. According to IEEE-754, double-precision |
| 3621 // -Infinity has the highest 12 bits set and the lowest 52 bits cleared. | 3613 // -Infinity has the highest 12 bits set and the lowest 52 bits cleared. |
| 3622 __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000), RelocInfo::NONE64); | 3614 __ movq(kScratchRegister, V8_INT64_C(0xFFF0000000000000), RelocInfo::NONE64); |
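The corner case named in the comment above also holds for the C library (C99 Annex F): pow(-Infinity, 0.5) is +Infinity while sqrt(-Infinity) is NaN, so a bare sqrtsd would return the wrong value for a base of -Infinity, hence the explicit bit-pattern check. For reference:

#include <cassert>
#include <cmath>

// ES5 15.8.2.13 corner case motivating the -Infinity check above.
void PowHalfCornerCase() {
  assert(std::pow(-INFINITY, 0.5) == INFINITY);
  assert(std::isnan(std::sqrt(-INFINITY)));
}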
| (...skipping 96 matching lines...) |
| 3719 // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF) | 3711 // Random bit pattern = (state[0] << 14) + (state[1] & 0x3FFFF) |
| 3720 Register random = state0; | 3712 Register random = state0; |
| 3721 __ shll(random, Immediate(14)); | 3713 __ shll(random, Immediate(14)); |
| 3722 __ andl(state1, Immediate(0x3FFFF)); | 3714 __ andl(state1, Immediate(0x3FFFF)); |
| 3723 __ addl(random, state1); | 3715 __ addl(random, state1); |
| 3724 | 3716 |
| 3725 // Convert 32 random bits in rax to 0.(32 random bits) in a double | 3717 // Convert 32 random bits in rax to 0.(32 random bits) in a double |
| 3726 // by computing: | 3718 // by computing: |
| 3727 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). | 3719 // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)). |
| 3728 XMMRegister result = ToDoubleRegister(instr->result()); | 3720 XMMRegister result = ToDoubleRegister(instr->result()); |
| 3729 // We use xmm0 as fixed scratch register here. | 3721 XMMRegister scratch4 = double_scratch0(); |
| 3730 XMMRegister scratch4 = xmm0; | |
| 3731 __ movq(scratch3, V8_INT64_C(0x4130000000000000), | 3722 __ movq(scratch3, V8_INT64_C(0x4130000000000000), |
| 3732 RelocInfo::NONE64); // 1.0 x 2^20 as double | 3723 RelocInfo::NONE64); // 1.0 x 2^20 as double |
| 3733 __ movq(scratch4, scratch3); | 3724 __ movq(scratch4, scratch3); |
| 3734 __ movd(result, random); | 3725 __ movd(result, random); |
| 3735 __ xorps(result, scratch4); | 3726 __ xorps(result, scratch4); |
| 3736 __ subsd(result, scratch4); | 3727 __ subsd(result, scratch4); |
| 3737 } | 3728 } |
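The conversion above plants the 32 random bits in the low half of the mantissa of 1.0 x 2^20 (the xorps acts as an OR because the bit ranges are disjoint) and then subtracts 1.0 x 2^20, leaving random / 2^32 in [0, 1). The same computation without assembly, for illustration:

#include <cstdint>
#include <cstring>

// 32 random bits -> uniform double in [0, 1), mirroring the bit trick above.
double RandomBitsToDouble(uint32_t random_bits) {
  const uint64_t kOneTimesTwoPow20 = UINT64_C(0x4130000000000000);  // 1.0 * 2^20
  uint64_t combined_bits = kOneTimesTwoPow20 | random_bits;
  double combined, base;
  std::memcpy(&combined, &combined_bits, sizeof combined);
  std::memcpy(&base, &kOneTimesTwoPow20, sizeof base);
  return combined - base;  // == random_bits / 2^32
}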
| 3738 | 3729 |
| 3739 | 3730 |
| 3740 void LCodeGen::DoMathExp(LMathExp* instr) { | 3731 void LCodeGen::DoMathExp(LMathExp* instr) { |
| 3741 XMMRegister input = ToDoubleRegister(instr->value()); | 3732 XMMRegister input = ToDoubleRegister(instr->value()); |
| 3742 XMMRegister result = ToDoubleRegister(instr->result()); | 3733 XMMRegister result = ToDoubleRegister(instr->result()); |
| 3734 XMMRegister temp0 = double_scratch0(); |
| 3743 Register temp1 = ToRegister(instr->temp1()); | 3735 Register temp1 = ToRegister(instr->temp1()); |
| 3744 Register temp2 = ToRegister(instr->temp2()); | 3736 Register temp2 = ToRegister(instr->temp2()); |
| 3745 | 3737 |
| 3746 MathExpGenerator::EmitMathExp(masm(), input, result, xmm0, temp1, temp2); | 3738 MathExpGenerator::EmitMathExp(masm(), input, result, temp0, temp1, temp2); |
| 3747 } | 3739 } |
| 3748 | 3740 |
| 3749 | 3741 |
| 3750 void LCodeGen::DoMathLog(LMathLog* instr) { | 3742 void LCodeGen::DoMathLog(LMathLog* instr) { |
| 3751 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); | 3743 ASSERT(ToDoubleRegister(instr->result()).is(xmm1)); |
| 3752 TranscendentalCacheStub stub(TranscendentalCache::LOG, | 3744 TranscendentalCacheStub stub(TranscendentalCache::LOG, |
| 3753 TranscendentalCacheStub::UNTAGGED); | 3745 TranscendentalCacheStub::UNTAGGED); |
| 3754 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); | 3746 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); |
| 3755 } | 3747 } |
| 3756 | 3748 |
| (...skipping 780 matching lines...) |
| 4537 Register reg = ToRegister(instr->value()); | 4529 Register reg = ToRegister(instr->value()); |
| 4538 Register tmp = reg.is(rax) ? rcx : rax; | 4530 Register tmp = reg.is(rax) ? rcx : rax; |
| 4539 | 4531 |
| 4540 // Preserve the value of all registers. | 4532 // Preserve the value of all registers. |
| 4541 PushSafepointRegistersScope scope(this); | 4533 PushSafepointRegistersScope scope(this); |
| 4542 | 4534 |
| 4543 Label done; | 4535 Label done; |
| 4544 // Load value into xmm1 which will be preserved across potential call to | 4536 // Load value into xmm1 which will be preserved across potential call to |
| 4545 // runtime (MacroAssembler::EnterExitFrameEpilogue preserves only allocatable | 4537 // runtime (MacroAssembler::EnterExitFrameEpilogue preserves only allocatable |
| 4546 // XMM registers on x64). | 4538 // XMM registers on x64). |
| 4547 __ LoadUint32(xmm1, reg, xmm0); | 4539 XMMRegister xmm_scratch = double_scratch0(); |
| 4540 __ LoadUint32(xmm1, reg, xmm_scratch); |
| 4548 | 4541 |
| 4549 if (FLAG_inline_new) { | 4542 if (FLAG_inline_new) { |
| 4550 __ AllocateHeapNumber(reg, tmp, &slow); | 4543 __ AllocateHeapNumber(reg, tmp, &slow); |
| 4551 __ jmp(&done, Label::kNear); | 4544 __ jmp(&done, Label::kNear); |
| 4552 } | 4545 } |
| 4553 | 4546 |
| 4554 // Slow case: Call the runtime system to do the number allocation. | 4547 // Slow case: Call the runtime system to do the number allocation. |
| 4555 __ bind(&slow); | 4548 __ bind(&slow); |
| 4556 | 4549 |
| 4557 // Put a valid pointer value in the stack slot where the result | 4550 // Put a valid pointer value in the stack slot where the result |
| (...skipping 98 matching lines...) |
| 4656 // check, since all heap objects are at least two words long. | 4649 // check, since all heap objects are at least two words long. |
| 4657 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4650 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 4658 | 4651 |
| 4659 if (can_convert_undefined_to_nan) { | 4652 if (can_convert_undefined_to_nan) { |
| 4660 __ j(not_equal, &convert); | 4653 __ j(not_equal, &convert); |
| 4661 } else { | 4654 } else { |
| 4662 DeoptimizeIf(not_equal, env); | 4655 DeoptimizeIf(not_equal, env); |
| 4663 } | 4656 } |
| 4664 | 4657 |
| 4665 if (deoptimize_on_minus_zero) { | 4658 if (deoptimize_on_minus_zero) { |
| 4666 XMMRegister xmm_scratch = xmm0; | 4659 XMMRegister xmm_scratch = double_scratch0(); |
| 4667 __ xorps(xmm_scratch, xmm_scratch); | 4660 __ xorps(xmm_scratch, xmm_scratch); |
| 4668 __ ucomisd(xmm_scratch, result_reg); | 4661 __ ucomisd(xmm_scratch, result_reg); |
| 4669 __ j(not_equal, &done, Label::kNear); | 4662 __ j(not_equal, &done, Label::kNear); |
| 4670 __ movmskpd(kScratchRegister, result_reg); | 4663 __ movmskpd(kScratchRegister, result_reg); |
| 4671 __ testq(kScratchRegister, Immediate(1)); | 4664 __ testq(kScratchRegister, Immediate(1)); |
| 4672 DeoptimizeIf(not_zero, env); | 4665 DeoptimizeIf(not_zero, env); |
| 4673 } | 4666 } |
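The minus-zero deopt above needs two steps because ucomisd cannot distinguish +0 from -0: once the value compares equal to zero, movmskpd copies the double's sign bit into bit 0 of the scratch register, and a set bit identifies -0. In plain C++ terms:

#include <cmath>

// What the ucomisd/movmskpd pair above establishes before deoptimizing.
bool IsMinusZeroViaSignBit(double value) {
  return value == 0.0 && std::signbit(value);
}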
| 4674 __ jmp(&done, Label::kNear); | 4667 __ jmp(&done, Label::kNear); |
| 4675 | 4668 |
| 4676 if (can_convert_undefined_to_nan) { | 4669 if (can_convert_undefined_to_nan) { |
| (...skipping 107 matching lines...) |
| 4784 LOperand* result = instr->result(); | 4777 LOperand* result = instr->result(); |
| 4785 ASSERT(result->IsRegister()); | 4778 ASSERT(result->IsRegister()); |
| 4786 | 4779 |
| 4787 XMMRegister input_reg = ToDoubleRegister(input); | 4780 XMMRegister input_reg = ToDoubleRegister(input); |
| 4788 Register result_reg = ToRegister(result); | 4781 Register result_reg = ToRegister(result); |
| 4789 | 4782 |
| 4790 if (instr->truncating()) { | 4783 if (instr->truncating()) { |
| 4791 __ TruncateDoubleToI(result_reg, input_reg); | 4784 __ TruncateDoubleToI(result_reg, input_reg); |
| 4792 } else { | 4785 } else { |
| 4793 Label bailout, done; | 4786 Label bailout, done; |
| 4794 __ DoubleToI(result_reg, input_reg, xmm0, | 4787 XMMRegister xmm_scratch = double_scratch0(); |
| 4788 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
| 4795 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); | 4789 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); |
| 4796 | 4790 |
| 4797 __ jmp(&done, Label::kNear); | 4791 __ jmp(&done, Label::kNear); |
| 4798 __ bind(&bailout); | 4792 __ bind(&bailout); |
| 4799 DeoptimizeIf(no_condition, instr->environment()); | 4793 DeoptimizeIf(no_condition, instr->environment()); |
| 4800 __ bind(&done); | 4794 __ bind(&done); |
| 4801 } | 4795 } |
| 4802 } | 4796 } |
| 4803 | 4797 |
| 4804 | 4798 |
| 4805 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { | 4799 void LCodeGen::DoDoubleToSmi(LDoubleToSmi* instr) { |
| 4806 LOperand* input = instr->value(); | 4800 LOperand* input = instr->value(); |
| 4807 ASSERT(input->IsDoubleRegister()); | 4801 ASSERT(input->IsDoubleRegister()); |
| 4808 LOperand* result = instr->result(); | 4802 LOperand* result = instr->result(); |
| 4809 ASSERT(result->IsRegister()); | 4803 ASSERT(result->IsRegister()); |
| 4810 | 4804 |
| 4811 XMMRegister input_reg = ToDoubleRegister(input); | 4805 XMMRegister input_reg = ToDoubleRegister(input); |
| 4812 Register result_reg = ToRegister(result); | 4806 Register result_reg = ToRegister(result); |
| 4813 | 4807 |
| 4814 Label bailout, done; | 4808 Label bailout, done; |
| 4815 __ DoubleToI(result_reg, input_reg, xmm0, | 4809 XMMRegister xmm_scratch = double_scratch0(); |
| 4810 __ DoubleToI(result_reg, input_reg, xmm_scratch, |
| 4816 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); | 4811 instr->hydrogen()->GetMinusZeroMode(), &bailout, Label::kNear); |
| 4817 | 4812 |
| 4818 __ jmp(&done, Label::kNear); | 4813 __ jmp(&done, Label::kNear); |
| 4819 __ bind(&bailout); | 4814 __ bind(&bailout); |
| 4820 DeoptimizeIf(no_condition, instr->environment()); | 4815 DeoptimizeIf(no_condition, instr->environment()); |
| 4821 __ bind(&done); | 4816 __ bind(&done); |
| 4822 | 4817 |
| 4823 __ Integer32ToSmi(result_reg, result_reg); | 4818 __ Integer32ToSmi(result_reg, result_reg); |
| 4824 DeoptimizeIf(overflow, instr->environment()); | 4819 DeoptimizeIf(overflow, instr->environment()); |
| 4825 } | 4820 } |
| (...skipping 56 matching lines...) |
| 4882 __ andb(kScratchRegister, Immediate(mask)); | 4877 __ andb(kScratchRegister, Immediate(mask)); |
| 4883 __ cmpb(kScratchRegister, Immediate(tag)); | 4878 __ cmpb(kScratchRegister, Immediate(tag)); |
| 4884 DeoptimizeIf(not_equal, instr->environment()); | 4879 DeoptimizeIf(not_equal, instr->environment()); |
| 4885 } | 4880 } |
| 4886 } | 4881 } |
| 4887 } | 4882 } |
| 4888 | 4883 |
| 4889 | 4884 |
| 4890 void LCodeGen::DoCheckValue(LCheckValue* instr) { | 4885 void LCodeGen::DoCheckValue(LCheckValue* instr) { |
| 4891 Register reg = ToRegister(instr->value()); | 4886 Register reg = ToRegister(instr->value()); |
| 4892 Handle<HeapObject> object = instr->hydrogen()->object(); | 4887 Handle<HeapObject> object = instr->hydrogen()->object().handle(); |
| 4893 __ CmpHeapObject(reg, object); | 4888 __ CmpHeapObject(reg, object); |
| 4894 DeoptimizeIf(not_equal, instr->environment()); | 4889 DeoptimizeIf(not_equal, instr->environment()); |
| 4895 } | 4890 } |
| 4896 | 4891 |
| 4897 | 4892 |
| 4898 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { | 4893 void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) { |
| 4899 { | 4894 { |
| 4900 PushSafepointRegistersScope scope(this); | 4895 PushSafepointRegistersScope scope(this); |
| 4901 __ push(object); | 4896 __ push(object); |
| 4902 CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr); | 4897 CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr); |
| (...skipping 20 matching lines...) |
| 4923 Label check_maps_; | 4918 Label check_maps_; |
| 4924 Register object_; | 4919 Register object_; |
| 4925 }; | 4920 }; |
| 4926 | 4921 |
| 4927 if (instr->hydrogen()->CanOmitMapChecks()) return; | 4922 if (instr->hydrogen()->CanOmitMapChecks()) return; |
| 4928 | 4923 |
| 4929 LOperand* input = instr->value(); | 4924 LOperand* input = instr->value(); |
| 4930 ASSERT(input->IsRegister()); | 4925 ASSERT(input->IsRegister()); |
| 4931 Register reg = ToRegister(input); | 4926 Register reg = ToRegister(input); |
| 4932 | 4927 |
| 4933 SmallMapList* map_set = instr->hydrogen()->map_set(); | |
| 4934 | |
| 4935 DeferredCheckMaps* deferred = NULL; | 4928 DeferredCheckMaps* deferred = NULL; |
| 4936 if (instr->hydrogen()->has_migration_target()) { | 4929 if (instr->hydrogen()->has_migration_target()) { |
| 4937 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); | 4930 deferred = new(zone()) DeferredCheckMaps(this, instr, reg); |
| 4938 __ bind(deferred->check_maps()); | 4931 __ bind(deferred->check_maps()); |
| 4939 } | 4932 } |
| 4940 | 4933 |
| 4934 UniqueSet<Map> map_set = instr->hydrogen()->map_set(); |
| 4941 Label success; | 4935 Label success; |
| 4942 for (int i = 0; i < map_set->length() - 1; i++) { | 4936 for (int i = 0; i < map_set.size() - 1; i++) { |
| 4943 Handle<Map> map = map_set->at(i); | 4937 Handle<Map> map = map_set.at(i).handle(); |
| 4944 __ CompareMap(reg, map, &success); | 4938 __ CompareMap(reg, map, &success); |
| 4945 __ j(equal, &success); | 4939 __ j(equal, &success); |
| 4946 } | 4940 } |
| 4947 | 4941 |
| 4948 Handle<Map> map = map_set->last(); | 4942 Handle<Map> map = map_set.at(map_set.size() - 1).handle(); |
| 4949 __ CompareMap(reg, map, &success); | 4943 __ CompareMap(reg, map, &success); |
| 4950 if (instr->hydrogen()->has_migration_target()) { | 4944 if (instr->hydrogen()->has_migration_target()) { |
| 4951 __ j(not_equal, deferred->entry()); | 4945 __ j(not_equal, deferred->entry()); |
| 4952 } else { | 4946 } else { |
| 4953 DeoptimizeIf(not_equal, instr->environment()); | 4947 DeoptimizeIf(not_equal, instr->environment()); |
| 4954 } | 4948 } |
| 4955 | 4949 |
| 4956 __ bind(&success); | 4950 __ bind(&success); |
| 4957 } | 4951 } |
| 4958 | 4952 |
| 4959 | 4953 |
| 4960 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { | 4954 void LCodeGen::DoClampDToUint8(LClampDToUint8* instr) { |
| 4961 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); | 4955 XMMRegister value_reg = ToDoubleRegister(instr->unclamped()); |
| 4956 XMMRegister xmm_scratch = double_scratch0(); |
| 4962 Register result_reg = ToRegister(instr->result()); | 4957 Register result_reg = ToRegister(instr->result()); |
| 4963 __ ClampDoubleToUint8(value_reg, xmm0, result_reg); | 4958 __ ClampDoubleToUint8(value_reg, xmm_scratch, result_reg); |
| 4964 } | 4959 } |
| 4965 | 4960 |
| 4966 | 4961 |
| 4967 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { | 4962 void LCodeGen::DoClampIToUint8(LClampIToUint8* instr) { |
| 4968 ASSERT(instr->unclamped()->Equals(instr->result())); | 4963 ASSERT(instr->unclamped()->Equals(instr->result())); |
| 4969 Register value_reg = ToRegister(instr->result()); | 4964 Register value_reg = ToRegister(instr->result()); |
| 4970 __ ClampUint8(value_reg); | 4965 __ ClampUint8(value_reg); |
| 4971 } | 4966 } |
| 4972 | 4967 |
| 4973 | 4968 |
| 4974 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) { | 4969 void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) { |
| 4975 ASSERT(instr->unclamped()->Equals(instr->result())); | 4970 ASSERT(instr->unclamped()->Equals(instr->result())); |
| 4976 Register input_reg = ToRegister(instr->unclamped()); | 4971 Register input_reg = ToRegister(instr->unclamped()); |
| 4977 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm()); | 4972 XMMRegister temp_xmm_reg = ToDoubleRegister(instr->temp_xmm()); |
| 4973 XMMRegister xmm_scratch = double_scratch0(); |
| 4978 Label is_smi, done, heap_number; | 4974 Label is_smi, done, heap_number; |
| 4979 | 4975 |
| 4980 __ JumpIfSmi(input_reg, &is_smi); | 4976 __ JumpIfSmi(input_reg, &is_smi); |
| 4981 | 4977 |
| 4982 // Check for heap number | 4978 // Check for heap number |
| 4983 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), | 4979 __ Cmp(FieldOperand(input_reg, HeapObject::kMapOffset), |
| 4984 factory()->heap_number_map()); | 4980 factory()->heap_number_map()); |
| 4985 __ j(equal, &heap_number, Label::kNear); | 4981 __ j(equal, &heap_number, Label::kNear); |
| 4986 | 4982 |
| 4987 // Check for undefined. Undefined is converted to zero for clamping | 4983 // Check for undefined. Undefined is converted to zero for clamping |
| 4988 // conversions. | 4984 // conversions. |
| 4989 __ Cmp(input_reg, factory()->undefined_value()); | 4985 __ Cmp(input_reg, factory()->undefined_value()); |
| 4990 DeoptimizeIf(not_equal, instr->environment()); | 4986 DeoptimizeIf(not_equal, instr->environment()); |
| 4991 __ movq(input_reg, Immediate(0)); | 4987 __ movq(input_reg, Immediate(0)); |
| 4992 __ jmp(&done, Label::kNear); | 4988 __ jmp(&done, Label::kNear); |
| 4993 | 4989 |
| 4994 // Heap number | 4990 // Heap number |
| 4995 __ bind(&heap_number); | 4991 __ bind(&heap_number); |
| 4996 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); | 4992 __ movsd(xmm_scratch, FieldOperand(input_reg, HeapNumber::kValueOffset)); |
| 4997 __ ClampDoubleToUint8(xmm0, temp_xmm_reg, input_reg); | 4993 __ ClampDoubleToUint8(xmm_scratch, temp_xmm_reg, input_reg); |
| 4998 __ jmp(&done, Label::kNear); | 4994 __ jmp(&done, Label::kNear); |
| 4999 | 4995 |
| 5000 // smi | 4996 // smi |
| 5001 __ bind(&is_smi); | 4997 __ bind(&is_smi); |
| 5002 __ SmiToInteger32(input_reg, input_reg); | 4998 __ SmiToInteger32(input_reg, input_reg); |
| 5003 __ ClampUint8(input_reg); | 4999 __ ClampUint8(input_reg); |
| 5004 | 5000 |
| 5005 __ bind(&done); | 5001 __ bind(&done); |
| 5006 } | 5002 } |
| 5007 | 5003 |
| (...skipping 512 matching lines...) |
| 5520 FixedArray::kHeaderSize - kPointerSize)); | 5516 FixedArray::kHeaderSize - kPointerSize)); |
| 5521 __ bind(&done); | 5517 __ bind(&done); |
| 5522 } | 5518 } |
| 5523 | 5519 |
| 5524 | 5520 |
| 5525 #undef __ | 5521 #undef __ |
| 5526 | 5522 |
| 5527 } } // namespace v8::internal | 5523 } } // namespace v8::internal |
| 5528 | 5524 |
| 5529 #endif // V8_TARGET_ARCH_X64 | 5525 #endif // V8_TARGET_ARCH_X64 |