OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 991 matching lines...)
1002 Register heap_number_map = r8; | 1002 Register heap_number_map = r8; |
1003 Register scratch1 = r9; | 1003 Register scratch1 = r9; |
1004 Register scratch2 = r10; | 1004 Register scratch2 = r10; |
1005 // HeapNumbers containing 32bit integer values are also allowed. | 1005 // HeapNumbers containing 32bit integer values are also allowed. |
1006 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 1006 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
1007 __ cmpq(FieldOperand(input, HeapObject::kMapOffset), heap_number_map); | 1007 __ cmpq(FieldOperand(input, HeapObject::kMapOffset), heap_number_map); |
1008 __ j(not_equal, fail); | 1008 __ j(not_equal, fail); |
1009 __ movsd(xmm0, FieldOperand(input, HeapNumber::kValueOffset)); | 1009 __ movsd(xmm0, FieldOperand(input, HeapNumber::kValueOffset)); |
1010 // Convert, convert back, and compare the two doubles' bits. | 1010 // Convert, convert back, and compare the two doubles' bits. |
1011 __ cvttsd2siq(scratch2, xmm0); | 1011 __ cvttsd2siq(scratch2, xmm0); |
1012 __ cvtlsi2sd(xmm1, scratch2); | 1012 __ Cvtlsi2sd(xmm1, scratch2); |
1013 __ movq(scratch1, xmm0); | 1013 __ movq(scratch1, xmm0); |
1014 __ movq(scratch2, xmm1); | 1014 __ movq(scratch2, xmm1); |
1015 __ cmpq(scratch1, scratch2); | 1015 __ cmpq(scratch1, scratch2); |
1016 __ j(not_equal, fail); | 1016 __ j(not_equal, fail); |
1017 __ bind(&ok); | 1017 __ bind(&ok); |
1018 } | 1018 } |
1019 | 1019 |
1020 | 1020 |
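The functional change running through this file is the replacement of the raw assembler instruction cvtlsi2sd with the MacroAssembler helper Cvtlsi2sd at every int32-to-double conversion site. The helper's definition is not part of this hunk; presumably it zeroes the destination before converting (e.g. with xorps), since cvtsi2sd only writes the low 64 bits of the XMM register and therefore carries a false dependency on whatever the register held before. A minimal sketch of such a wrapper, under that assumption (the real definition would live in macro-assembler-x64.cc and is not shown here):

  // Sketch only: convert int32 -> double without depending on dst's old contents.
  void MacroAssembler::Cvtlsi2sd(XMMRegister dst, Register src) {
    xorps(dst, dst);      // recognized zeroing idiom; breaks the dependency on dst
    cvtlsi2sd(dst, src);  // merge the converted value into the now-zero register
  }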
1021 void BinaryOpStub::GenerateNumberStub(MacroAssembler* masm) { | 1021 void BinaryOpStub::GenerateNumberStub(MacroAssembler* masm) { |
1022 Label gc_required, not_number; | 1022 Label gc_required, not_number; |
(...skipping 115 matching lines...)
1138 Label input_not_smi, loaded; | 1138 Label input_not_smi, loaded; |
1139 | 1139 |
1140 // Test that rax is a number. | 1140 // Test that rax is a number. |
1141 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1141 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1142 __ movq(rax, args.GetArgumentOperand(0)); | 1142 __ movq(rax, args.GetArgumentOperand(0)); |
1143 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); | 1143 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); |
1144 // Input is a smi. Untag and load it onto the FPU stack. | 1144 // Input is a smi. Untag and load it onto the FPU stack. |
1145 // Then load the bits of the double into rbx. | 1145 // Then load the bits of the double into rbx. |
1146 __ SmiToInteger32(rax, rax); | 1146 __ SmiToInteger32(rax, rax); |
1147 __ subq(rsp, Immediate(kDoubleSize)); | 1147 __ subq(rsp, Immediate(kDoubleSize)); |
1148 __ cvtlsi2sd(xmm1, rax); | 1148 __ Cvtlsi2sd(xmm1, rax); |
1149 __ movsd(Operand(rsp, 0), xmm1); | 1149 __ movsd(Operand(rsp, 0), xmm1); |
1150 __ movq(rbx, xmm1); | 1150 __ movq(rbx, xmm1); |
1151 __ movq(rdx, xmm1); | 1151 __ movq(rdx, xmm1); |
1152 __ fld_d(Operand(rsp, 0)); | 1152 __ fld_d(Operand(rsp, 0)); |
1153 __ addq(rsp, Immediate(kDoubleSize)); | 1153 __ addq(rsp, Immediate(kDoubleSize)); |
1154 __ jmp(&loaded, Label::kNear); | 1154 __ jmp(&loaded, Label::kNear); |
1155 | 1155 |
1156 __ bind(&input_not_smi); | 1156 __ bind(&input_not_smi); |
1157 // Check if input is a HeapNumber. | 1157 // Check if input is a HeapNumber. |
1158 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex); | 1158 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex); |
(...skipping 311 matching lines...)
1470 // Get the untagged integer version of the rax heap number in rcx. | 1470 // Get the untagged integer version of the rax heap number in rcx. |
1471 __ TruncateHeapNumberToI(rcx, rax); | 1471 __ TruncateHeapNumberToI(rcx, rax); |
1472 | 1472 |
1473 __ bind(&done); | 1473 __ bind(&done); |
1474 __ movl(rax, r8); | 1474 __ movl(rax, r8); |
1475 } | 1475 } |
1476 | 1476 |
1477 | 1477 |
1478 void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) { | 1478 void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) { |
1479 __ SmiToInteger32(kScratchRegister, rdx); | 1479 __ SmiToInteger32(kScratchRegister, rdx); |
1480 __ cvtlsi2sd(xmm0, kScratchRegister); | 1480 __ Cvtlsi2sd(xmm0, kScratchRegister); |
1481 __ SmiToInteger32(kScratchRegister, rax); | 1481 __ SmiToInteger32(kScratchRegister, rax); |
1482 __ cvtlsi2sd(xmm1, kScratchRegister); | 1482 __ Cvtlsi2sd(xmm1, kScratchRegister); |
1483 } | 1483 } |
1484 | 1484 |
1485 | 1485 |
1486 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm, | 1486 void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm, |
1487 Label* not_numbers) { | 1487 Label* not_numbers) { |
1488 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done; | 1488 Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done; |
1489 // Load operand in rdx into xmm0, or branch to not_numbers. | 1489 // Load operand in rdx into xmm0, or branch to not_numbers. |
1490 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); | 1490 __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex); |
1491 __ JumpIfSmi(rdx, &load_smi_rdx); | 1491 __ JumpIfSmi(rdx, &load_smi_rdx); |
1492 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx); | 1492 __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx); |
1493 __ j(not_equal, not_numbers); // Argument in rdx is not a number. | 1493 __ j(not_equal, not_numbers); // Argument in rdx is not a number. |
1494 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); | 1494 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); |
1495 // Load operand in rax into xmm1, or branch to not_numbers. | 1495 // Load operand in rax into xmm1, or branch to not_numbers. |
1496 __ JumpIfSmi(rax, &load_smi_rax); | 1496 __ JumpIfSmi(rax, &load_smi_rax); |
1497 | 1497 |
1498 __ bind(&load_nonsmi_rax); | 1498 __ bind(&load_nonsmi_rax); |
1499 __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx); | 1499 __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx); |
1500 __ j(not_equal, not_numbers); | 1500 __ j(not_equal, not_numbers); |
1501 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | 1501 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
1502 __ jmp(&done); | 1502 __ jmp(&done); |
1503 | 1503 |
1504 __ bind(&load_smi_rdx); | 1504 __ bind(&load_smi_rdx); |
1505 __ SmiToInteger32(kScratchRegister, rdx); | 1505 __ SmiToInteger32(kScratchRegister, rdx); |
1506 __ cvtlsi2sd(xmm0, kScratchRegister); | 1506 __ Cvtlsi2sd(xmm0, kScratchRegister); |
1507 __ JumpIfNotSmi(rax, &load_nonsmi_rax); | 1507 __ JumpIfNotSmi(rax, &load_nonsmi_rax); |
1508 | 1508 |
1509 __ bind(&load_smi_rax); | 1509 __ bind(&load_smi_rax); |
1510 __ SmiToInteger32(kScratchRegister, rax); | 1510 __ SmiToInteger32(kScratchRegister, rax); |
1511 __ cvtlsi2sd(xmm1, kScratchRegister); | 1511 __ Cvtlsi2sd(xmm1, kScratchRegister); |
1512 __ bind(&done); | 1512 __ bind(&done); |
1513 } | 1513 } |
1514 | 1514 |
1515 | 1515 |
1516 void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm, | 1516 void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm, |
1517 Register first, | 1517 Register first, |
1518 Register second, | 1518 Register second, |
1519 Register scratch1, | 1519 Register scratch1, |
1520 Register scratch2, | 1520 Register scratch2, |
1521 Register scratch3, | 1521 Register scratch3, |
(...skipping 12 matching lines...)
1534 __ j(not_equal, | 1534 __ j(not_equal, |
1535 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO) | 1535 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO) |
1536 ? &maybe_undefined_first | 1536 ? &maybe_undefined_first |
1537 : on_not_smis); | 1537 : on_not_smis); |
1538 // Convert HeapNumber to smi if possible. | 1538 // Convert HeapNumber to smi if possible. |
1539 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset)); | 1539 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset)); |
1540 __ movq(scratch2, xmm0); | 1540 __ movq(scratch2, xmm0); |
1541 __ cvttsd2siq(smi_result, xmm0); | 1541 __ cvttsd2siq(smi_result, xmm0); |
1542 // Check if conversion was successful by converting back and | 1542 // Check if conversion was successful by converting back and |
1543 // comparing to the original double's bits. | 1543 // comparing to the original double's bits. |
1544 __ cvtlsi2sd(xmm1, smi_result); | 1544 __ Cvtlsi2sd(xmm1, smi_result); |
1545 __ movq(kScratchRegister, xmm1); | 1545 __ movq(kScratchRegister, xmm1); |
1546 __ cmpq(scratch2, kScratchRegister); | 1546 __ cmpq(scratch2, kScratchRegister); |
1547 __ j(not_equal, on_not_smis); | 1547 __ j(not_equal, on_not_smis); |
1548 __ Integer32ToSmi(first, smi_result); | 1548 __ Integer32ToSmi(first, smi_result); |
1549 | 1549 |
1550 __ bind(&first_done); | 1550 __ bind(&first_done); |
1551 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done); | 1551 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done); |
1552 __ bind(&first_smi); | 1552 __ bind(&first_smi); |
1553 __ AssertNotSmi(second); | 1553 __ AssertNotSmi(second); |
1554 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map); | 1554 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map); |
1555 __ j(not_equal, | 1555 __ j(not_equal, |
1556 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO) | 1556 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO) |
1557 ? &maybe_undefined_second | 1557 ? &maybe_undefined_second |
1558 : on_not_smis); | 1558 : on_not_smis); |
1559 // Convert second to smi, if possible. | 1559 // Convert second to smi, if possible. |
1560 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset)); | 1560 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset)); |
1561 __ movq(scratch2, xmm0); | 1561 __ movq(scratch2, xmm0); |
1562 __ cvttsd2siq(smi_result, xmm0); | 1562 __ cvttsd2siq(smi_result, xmm0); |
1563 __ cvtlsi2sd(xmm1, smi_result); | 1563 __ Cvtlsi2sd(xmm1, smi_result); |
1564 __ movq(kScratchRegister, xmm1); | 1564 __ movq(kScratchRegister, xmm1); |
1565 __ cmpq(scratch2, kScratchRegister); | 1565 __ cmpq(scratch2, kScratchRegister); |
1566 __ j(not_equal, on_not_smis); | 1566 __ j(not_equal, on_not_smis); |
1567 __ Integer32ToSmi(second, smi_result); | 1567 __ Integer32ToSmi(second, smi_result); |
1568 if (on_success != NULL) { | 1568 if (on_success != NULL) { |
1569 __ jmp(on_success); | 1569 __ jmp(on_success); |
1570 } else { | 1570 } else { |
1571 __ jmp(&done); | 1571 __ jmp(&done); |
1572 } | 1572 } |
1573 | 1573 |
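Both conversion checks in NumbersToSmis above (and the similar one in the first hunk) use the same idiom: truncate the double to an integer (cvttsd2siq), convert back (Cvtlsi2sd), and compare the two bit patterns; only a double that stores an exact integer value survives the round trip, and -0.0 deliberately fails because its bits differ from those of 0.0. A standalone C++ illustration of the idea, not V8 code:

  #include <cstdint>
  #include <cstring>

  // True when 'value' round-trips exactly through a 64-bit truncating
  // conversion, i.e. the double holds an integral value (and is not -0.0).
  bool RoundTripsThroughInt64(double value) {
    // Guard the cast: out-of-range conversions are undefined behaviour in C++,
    // whereas the stub simply lets the bit comparison below reject them.
    if (!(value >= -9223372036854775808.0 && value < 9223372036854775808.0)) {
      return false;
    }
    int64_t truncated = static_cast<int64_t>(value);     // cvttsd2siq
    double round_trip = static_cast<double>(truncated);  // Cvtlsi2sd
    uint64_t value_bits, round_trip_bits;
    std::memcpy(&value_bits, &value, sizeof(value));                 // movq scratch2, xmm0
    std::memcpy(&round_trip_bits, &round_trip, sizeof(round_trip));  // movq kScratchRegister, xmm1
    return value_bits == round_trip_bits;                            // cmpq
  }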
(...skipping 22 matching lines...)
1596 const Register scratch = rcx; | 1596 const Register scratch = rcx; |
1597 const XMMRegister double_result = xmm3; | 1597 const XMMRegister double_result = xmm3; |
1598 const XMMRegister double_base = xmm2; | 1598 const XMMRegister double_base = xmm2; |
1599 const XMMRegister double_exponent = xmm1; | 1599 const XMMRegister double_exponent = xmm1; |
1600 const XMMRegister double_scratch = xmm4; | 1600 const XMMRegister double_scratch = xmm4; |
1601 | 1601 |
1602 Label call_runtime, done, exponent_not_smi, int_exponent; | 1602 Label call_runtime, done, exponent_not_smi, int_exponent; |
1603 | 1603 |
1604 // Save 1 in double_result - we need this several times later on. | 1604 // Save 1 in double_result - we need this several times later on. |
1605 __ movq(scratch, Immediate(1)); | 1605 __ movq(scratch, Immediate(1)); |
1606 __ cvtlsi2sd(double_result, scratch); | 1606 __ Cvtlsi2sd(double_result, scratch); |
1607 | 1607 |
1608 if (exponent_type_ == ON_STACK) { | 1608 if (exponent_type_ == ON_STACK) { |
1609 Label base_is_smi, unpack_exponent; | 1609 Label base_is_smi, unpack_exponent; |
1610 // The exponent and base are supplied as arguments on the stack. | 1610 // The exponent and base are supplied as arguments on the stack. |
1611 // This can only happen if the stub is called from non-optimized code. | 1611 // This can only happen if the stub is called from non-optimized code. |
1612 // Load input parameters from stack. | 1612 // Load input parameters from stack. |
1613 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); | 1613 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); |
1614 __ movq(base, args.GetArgumentOperand(0)); | 1614 __ movq(base, args.GetArgumentOperand(0)); |
1615 __ movq(exponent, args.GetArgumentOperand(1)); | 1615 __ movq(exponent, args.GetArgumentOperand(1)); |
1616 __ JumpIfSmi(base, &base_is_smi, Label::kNear); | 1616 __ JumpIfSmi(base, &base_is_smi, Label::kNear); |
1617 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), | 1617 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), |
1618 Heap::kHeapNumberMapRootIndex); | 1618 Heap::kHeapNumberMapRootIndex); |
1619 __ j(not_equal, &call_runtime); | 1619 __ j(not_equal, &call_runtime); |
1620 | 1620 |
1621 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); | 1621 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); |
1622 __ jmp(&unpack_exponent, Label::kNear); | 1622 __ jmp(&unpack_exponent, Label::kNear); |
1623 | 1623 |
1624 __ bind(&base_is_smi); | 1624 __ bind(&base_is_smi); |
1625 __ SmiToInteger32(base, base); | 1625 __ SmiToInteger32(base, base); |
1626 __ cvtlsi2sd(double_base, base); | 1626 __ Cvtlsi2sd(double_base, base); |
1627 __ bind(&unpack_exponent); | 1627 __ bind(&unpack_exponent); |
1628 | 1628 |
1629 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); | 1629 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); |
1630 __ SmiToInteger32(exponent, exponent); | 1630 __ SmiToInteger32(exponent, exponent); |
1631 __ jmp(&int_exponent); | 1631 __ jmp(&int_exponent); |
1632 | 1632 |
1633 __ bind(&exponent_not_smi); | 1633 __ bind(&exponent_not_smi); |
1634 __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset), | 1634 __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset), |
1635 Heap::kHeapNumberMapRootIndex); | 1635 Heap::kHeapNumberMapRootIndex); |
1636 __ j(not_equal, &call_runtime); | 1636 __ j(not_equal, &call_runtime); |
(...skipping 168 matching lines...)
1805 __ divsd(double_scratch2, double_result); | 1805 __ divsd(double_scratch2, double_result); |
1806 __ movsd(double_result, double_scratch2); | 1806 __ movsd(double_result, double_scratch2); |
1807 // Test whether result is zero. Bail out to check for subnormal result. | 1807 // Test whether result is zero. Bail out to check for subnormal result. |
1808 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. | 1808 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. |
1809 __ xorps(double_scratch2, double_scratch2); | 1809 __ xorps(double_scratch2, double_scratch2); |
1810 __ ucomisd(double_scratch2, double_result); | 1810 __ ucomisd(double_scratch2, double_result); |
1811 // double_exponent aliased as double_scratch2 has already been overwritten | 1811 // double_exponent aliased as double_scratch2 has already been overwritten |
1812 // and may not have contained the exponent value in the first place when the | 1812 // and may not have contained the exponent value in the first place when the |
1813 // input was a smi. We reset it with exponent value before bailing out. | 1813 // input was a smi. We reset it with exponent value before bailing out. |
1814 __ j(not_equal, &done); | 1814 __ j(not_equal, &done); |
1815 __ cvtlsi2sd(double_exponent, exponent); | 1815 __ Cvtlsi2sd(double_exponent, exponent); |
1816 | 1816 |
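The test against zero above guards a genuine corner case: for a negative exponent the stub computes x^-y as 1/(x^y), and when x^y overflows to infinity the reciprocal collapses to zero even though the mathematically correct result may still be a representable subnormal. A small standalone illustration, not V8 code, assuming a standard libm:

  #include <cmath>
  #include <cstdio>

  int main() {
    // pow(2, 1074) overflows to +inf, so the reciprocal trick yields 0.0 ...
    double via_reciprocal = 1.0 / std::pow(2.0, 1074.0);
    // ... while 2^-1074 is actually the smallest positive subnormal double.
    double direct = std::pow(2.0, -1074.0);
    std::printf("via reciprocal: %g, direct: %g\n", via_reciprocal, direct);
    return 0;
  }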
1817 // Returning or bailing out. | 1817 // Returning or bailing out. |
1818 Counters* counters = masm->isolate()->counters(); | 1818 Counters* counters = masm->isolate()->counters(); |
1819 if (exponent_type_ == ON_STACK) { | 1819 if (exponent_type_ == ON_STACK) { |
1820 // The arguments are still on the stack. | 1820 // The arguments are still on the stack. |
1821 __ bind(&call_runtime); | 1821 __ bind(&call_runtime); |
1822 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); | 1822 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); |
1823 | 1823 |
1824 // The stub is called from non-optimized code, which expects the result | 1824 // The stub is called from non-optimized code, which expects the result |
1825 // as heap number in rax. | 1825 // as heap number in rax. |
(...skipping 3557 matching lines...)
5383 | 5383 |
5384 // Load left and right operand. | 5384 // Load left and right operand. |
5385 Label done, left, left_smi, right_smi; | 5385 Label done, left, left_smi, right_smi; |
5386 __ JumpIfSmi(rax, &right_smi, Label::kNear); | 5386 __ JumpIfSmi(rax, &right_smi, Label::kNear); |
5387 __ CompareMap(rax, masm->isolate()->factory()->heap_number_map(), NULL); | 5387 __ CompareMap(rax, masm->isolate()->factory()->heap_number_map(), NULL); |
5388 __ j(not_equal, &maybe_undefined1, Label::kNear); | 5388 __ j(not_equal, &maybe_undefined1, Label::kNear); |
5389 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); | 5389 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
5390 __ jmp(&left, Label::kNear); | 5390 __ jmp(&left, Label::kNear); |
5391 __ bind(&right_smi); | 5391 __ bind(&right_smi); |
5392 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. | 5392 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. |
5393 __ cvtlsi2sd(xmm1, rcx); | 5393 __ Cvtlsi2sd(xmm1, rcx); |
5394 | 5394 |
5395 __ bind(&left); | 5395 __ bind(&left); |
5396 __ JumpIfSmi(rdx, &left_smi, Label::kNear); | 5396 __ JumpIfSmi(rdx, &left_smi, Label::kNear); |
5397 __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map(), NULL); | 5397 __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map(), NULL); |
5398 __ j(not_equal, &maybe_undefined2, Label::kNear); | 5398 __ j(not_equal, &maybe_undefined2, Label::kNear); |
5399 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); | 5399 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); |
5400 __ jmp(&done); | 5400 __ jmp(&done); |
5401 __ bind(&left_smi); | 5401 __ bind(&left_smi); |
5402 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. | 5402 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. |
5403 __ cvtlsi2sd(xmm0, rcx); | 5403 __ Cvtlsi2sd(xmm0, rcx); |
5404 | 5404 |
5405 __ bind(&done); | 5405 __ bind(&done); |
5406 // Compare operands | 5406 // Compare operands |
5407 __ ucomisd(xmm0, xmm1); | 5407 __ ucomisd(xmm0, xmm1); |
5408 | 5408 |
5409 // Don't base result on EFLAGS when a NaN is involved. | 5409 // Don't base result on EFLAGS when a NaN is involved. |
5410 __ j(parity_even, &unordered, Label::kNear); | 5410 __ j(parity_even, &unordered, Label::kNear); |
5411 | 5411 |
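The parity_even branch above is the standard SSE idiom for detecting NaN: ucomisd reports an unordered comparison by setting ZF, PF and CF all to 1, so checking the parity flag first keeps a NaN operand from being misread as less-than, equal or greater-than. The same unordered behaviour expressed in plain C++, not V8 code:

  #include <cmath>
  #include <cstdio>

  int main() {
    double nan = std::nan("");
    // Every ordered comparison involving NaN is false, which is why the stub
    // must branch on the parity flag before interpreting ZF/CF as <, ==, >.
    std::printf("%d %d %d\n", nan < 1.0, nan == nan, nan > 1.0);  // prints: 0 0 0
    return 0;
  }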
5412 // Return a result of -1, 0, or 1, based on EFLAGS. | 5412 // Return a result of -1, 0, or 1, based on EFLAGS. |
5413 // Performing mov, because xor would destroy the flag register. | 5413 // Performing mov, because xor would destroy the flag register. |
(...skipping 1205 matching lines...)
6619 __ bind(&fast_elements_case); | 6619 __ bind(&fast_elements_case); |
6620 GenerateCase(masm, FAST_ELEMENTS); | 6620 GenerateCase(masm, FAST_ELEMENTS); |
6621 } | 6621 } |
6622 | 6622 |
6623 | 6623 |
6624 #undef __ | 6624 #undef __ |
6625 | 6625 |
6626 } } // namespace v8::internal | 6626 } } // namespace v8::internal |
6627 | 6627 |
6628 #endif // V8_TARGET_ARCH_X64 | 6628 #endif // V8_TARGET_ARCH_X64 |