Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(5)

Side by Side Diff: src/x64/code-stubs-x64.cc

Issue 23654026: Use xorps to break the unnecessary dependence that cvtsi2sd introduces through its partial write of the destination XMM register (Closed) Base URL: git://github.com/v8/v8.git@master
Patch Set: x64 port Created 7 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2013 the V8 project authors. All rights reserved. 1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 991 matching lines...) Expand 10 before | Expand all | Expand 10 after
1002 Register heap_number_map = r8; 1002 Register heap_number_map = r8;
1003 Register scratch1 = r9; 1003 Register scratch1 = r9;
1004 Register scratch2 = r10; 1004 Register scratch2 = r10;
1005 // HeapNumbers containing 32bit integer values are also allowed. 1005 // HeapNumbers containing 32bit integer values are also allowed.
1006 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); 1006 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
1007 __ cmpq(FieldOperand(input, HeapObject::kMapOffset), heap_number_map); 1007 __ cmpq(FieldOperand(input, HeapObject::kMapOffset), heap_number_map);
1008 __ j(not_equal, fail); 1008 __ j(not_equal, fail);
1009 __ movsd(xmm0, FieldOperand(input, HeapNumber::kValueOffset)); 1009 __ movsd(xmm0, FieldOperand(input, HeapNumber::kValueOffset));
1010 // Convert, convert back, and compare the two doubles' bits. 1010 // Convert, convert back, and compare the two doubles' bits.
1011 __ cvttsd2siq(scratch2, xmm0); 1011 __ cvttsd2siq(scratch2, xmm0);
1012 __ cvtlsi2sd(xmm1, scratch2); 1012 __ Cvtlsi2sd(xmm1, scratch2);
1013 __ movq(scratch1, xmm0); 1013 __ movq(scratch1, xmm0);
1014 __ movq(scratch2, xmm1); 1014 __ movq(scratch2, xmm1);
1015 __ cmpq(scratch1, scratch2); 1015 __ cmpq(scratch1, scratch2);
1016 __ j(not_equal, fail); 1016 __ j(not_equal, fail);
1017 __ bind(&ok); 1017 __ bind(&ok);
1018 } 1018 }
1019 1019
1020 1020
1021 void BinaryOpStub::GenerateNumberStub(MacroAssembler* masm) { 1021 void BinaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
1022 Label gc_required, not_number; 1022 Label gc_required, not_number;
(...skipping 114 matching lines...) Expand 10 before | Expand all | Expand 10 after
1137 if (tagged) { 1137 if (tagged) {
1138 Label input_not_smi, loaded; 1138 Label input_not_smi, loaded;
1139 // Test that rax is a number. 1139 // Test that rax is a number.
1140 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER); 1140 StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
1141 __ movq(rax, args.GetArgumentOperand(0)); 1141 __ movq(rax, args.GetArgumentOperand(0));
1142 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear); 1142 __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
1143 // Input is a smi. Untag and load it onto the FPU stack. 1143 // Input is a smi. Untag and load it onto the FPU stack.
1144 // Then load the bits of the double into rbx. 1144 // Then load the bits of the double into rbx.
1145 __ SmiToInteger32(rax, rax); 1145 __ SmiToInteger32(rax, rax);
1146 __ subq(rsp, Immediate(kDoubleSize)); 1146 __ subq(rsp, Immediate(kDoubleSize));
1147 __ cvtlsi2sd(xmm1, rax); 1147 __ Cvtlsi2sd(xmm1, rax);
1148 __ movsd(Operand(rsp, 0), xmm1); 1148 __ movsd(Operand(rsp, 0), xmm1);
1149 __ movq(rbx, xmm1); 1149 __ movq(rbx, xmm1);
1150 __ movq(rdx, xmm1); 1150 __ movq(rdx, xmm1);
1151 __ fld_d(Operand(rsp, 0)); 1151 __ fld_d(Operand(rsp, 0));
1152 __ addq(rsp, Immediate(kDoubleSize)); 1152 __ addq(rsp, Immediate(kDoubleSize));
1153 __ jmp(&loaded, Label::kNear); 1153 __ jmp(&loaded, Label::kNear);
1154 1154
1155 __ bind(&input_not_smi); 1155 __ bind(&input_not_smi);
1156 // Check if input is a HeapNumber. 1156 // Check if input is a HeapNumber.
1157 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex); 1157 __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);
(...skipping 315 matching lines...) Expand 10 before | Expand all | Expand 10 after
1473 true); 1473 true);
1474 __ call(stub2.GetCode(masm->isolate()), RelocInfo::CODE_TARGET); 1474 __ call(stub2.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
1475 1475
1476 __ bind(&done); 1476 __ bind(&done);
1477 __ movl(rax, r8); 1477 __ movl(rax, r8);
1478 } 1478 }
1479 1479
1480 1480
// Loads the two smi operands of a binary operation (left operand in rdx,
// right operand in rax) into xmm0 and xmm1 as untagged doubles.
// Clobbers kScratchRegister; does not check that the inputs are smis.
void FloatingPointHelper::LoadSSE2SmiOperands(MacroAssembler* masm) {
  __ SmiToInteger32(kScratchRegister, rdx);
  // Cvtlsi2sd (the capitalized macro-assembler helper) first zeroes the
  // destination with xorps to break the false dependence caused by
  // cvtsi2sd writing only the low half of the XMM register (see this CL's
  // description).
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
}
1487 1487
1488 1488
// Loads the two operands of a binary operation (left in rdx, right in rax),
// each of which may be a smi or a heap number, into xmm0 (left) and
// xmm1 (right) as doubles. Branches to |not_numbers| if either operand is
// neither a smi nor a heap number. Clobbers rcx and kScratchRegister.
void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  // NOTE(review): load_float_rax is declared but never bound or jumped to
  // in this function — consider removing it.
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  // Cvtlsi2sd zeroes the destination (xorps) before converting, avoiding
  // the partial-register false dependence of raw cvtsi2sd.
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  // rdx was a smi but rax may still be a heap number.
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}
1517 1517
1518 1518
1519 void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm, 1519 void FloatingPointHelper::NumbersToSmis(MacroAssembler* masm,
1520 Register first, 1520 Register first,
1521 Register second, 1521 Register second,
1522 Register scratch1, 1522 Register scratch1,
1523 Register scratch2, 1523 Register scratch2,
1524 Register scratch3, 1524 Register scratch3,
(...skipping 12 matching lines...) Expand all
1537 __ j(not_equal, 1537 __ j(not_equal,
1538 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO) 1538 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO)
1539 ? &maybe_undefined_first 1539 ? &maybe_undefined_first
1540 : on_not_smis); 1540 : on_not_smis);
1541 // Convert HeapNumber to smi if possible. 1541 // Convert HeapNumber to smi if possible.
1542 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset)); 1542 __ movsd(xmm0, FieldOperand(first, HeapNumber::kValueOffset));
1543 __ movq(scratch2, xmm0); 1543 __ movq(scratch2, xmm0);
1544 __ cvttsd2siq(smi_result, xmm0); 1544 __ cvttsd2siq(smi_result, xmm0);
1545 // Check if conversion was successful by converting back and 1545 // Check if conversion was successful by converting back and
1546 // comparing to the original double's bits. 1546 // comparing to the original double's bits.
1547 __ cvtlsi2sd(xmm1, smi_result); 1547 __ Cvtlsi2sd(xmm1, smi_result);
1548 __ movq(kScratchRegister, xmm1); 1548 __ movq(kScratchRegister, xmm1);
1549 __ cmpq(scratch2, kScratchRegister); 1549 __ cmpq(scratch2, kScratchRegister);
1550 __ j(not_equal, on_not_smis); 1550 __ j(not_equal, on_not_smis);
1551 __ Integer32ToSmi(first, smi_result); 1551 __ Integer32ToSmi(first, smi_result);
1552 1552
1553 __ bind(&first_done); 1553 __ bind(&first_done);
1554 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done); 1554 __ JumpIfSmi(second, (on_success != NULL) ? on_success : &done);
1555 __ bind(&first_smi); 1555 __ bind(&first_smi);
1556 __ AssertNotSmi(second); 1556 __ AssertNotSmi(second);
1557 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map); 1557 __ cmpq(FieldOperand(second, HeapObject::kMapOffset), heap_number_map);
1558 __ j(not_equal, 1558 __ j(not_equal,
1559 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO) 1559 (convert_undefined == CONVERT_UNDEFINED_TO_ZERO)
1560 ? &maybe_undefined_second 1560 ? &maybe_undefined_second
1561 : on_not_smis); 1561 : on_not_smis);
1562 // Convert second to smi, if possible. 1562 // Convert second to smi, if possible.
1563 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset)); 1563 __ movsd(xmm0, FieldOperand(second, HeapNumber::kValueOffset));
1564 __ movq(scratch2, xmm0); 1564 __ movq(scratch2, xmm0);
1565 __ cvttsd2siq(smi_result, xmm0); 1565 __ cvttsd2siq(smi_result, xmm0);
1566 __ cvtlsi2sd(xmm1, smi_result); 1566 __ Cvtlsi2sd(xmm1, smi_result);
1567 __ movq(kScratchRegister, xmm1); 1567 __ movq(kScratchRegister, xmm1);
1568 __ cmpq(scratch2, kScratchRegister); 1568 __ cmpq(scratch2, kScratchRegister);
1569 __ j(not_equal, on_not_smis); 1569 __ j(not_equal, on_not_smis);
1570 __ Integer32ToSmi(second, smi_result); 1570 __ Integer32ToSmi(second, smi_result);
1571 if (on_success != NULL) { 1571 if (on_success != NULL) {
1572 __ jmp(on_success); 1572 __ jmp(on_success);
1573 } else { 1573 } else {
1574 __ jmp(&done); 1574 __ jmp(&done);
1575 } 1575 }
1576 1576
(...skipping 22 matching lines...) Expand all
1599 const Register scratch = rcx; 1599 const Register scratch = rcx;
1600 const XMMRegister double_result = xmm3; 1600 const XMMRegister double_result = xmm3;
1601 const XMMRegister double_base = xmm2; 1601 const XMMRegister double_base = xmm2;
1602 const XMMRegister double_exponent = xmm1; 1602 const XMMRegister double_exponent = xmm1;
1603 const XMMRegister double_scratch = xmm4; 1603 const XMMRegister double_scratch = xmm4;
1604 1604
1605 Label call_runtime, done, exponent_not_smi, int_exponent; 1605 Label call_runtime, done, exponent_not_smi, int_exponent;
1606 1606
1607 // Save 1 in double_result - we need this several times later on. 1607 // Save 1 in double_result - we need this several times later on.
1608 __ movq(scratch, Immediate(1)); 1608 __ movq(scratch, Immediate(1));
1609 __ cvtlsi2sd(double_result, scratch); 1609 __ Cvtlsi2sd(double_result, scratch);
1610 1610
1611 if (exponent_type_ == ON_STACK) { 1611 if (exponent_type_ == ON_STACK) {
1612 Label base_is_smi, unpack_exponent; 1612 Label base_is_smi, unpack_exponent;
1613 // The exponent and base are supplied as arguments on the stack. 1613 // The exponent and base are supplied as arguments on the stack.
1614 // This can only happen if the stub is called from non-optimized code. 1614 // This can only happen if the stub is called from non-optimized code.
1615 // Load input parameters from stack. 1615 // Load input parameters from stack.
1616 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER); 1616 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
1617 __ movq(base, args.GetArgumentOperand(0)); 1617 __ movq(base, args.GetArgumentOperand(0));
1618 __ movq(exponent, args.GetArgumentOperand(1)); 1618 __ movq(exponent, args.GetArgumentOperand(1));
1619 __ JumpIfSmi(base, &base_is_smi, Label::kNear); 1619 __ JumpIfSmi(base, &base_is_smi, Label::kNear);
1620 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset), 1620 __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
1621 Heap::kHeapNumberMapRootIndex); 1621 Heap::kHeapNumberMapRootIndex);
1622 __ j(not_equal, &call_runtime); 1622 __ j(not_equal, &call_runtime);
1623 1623
1624 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset)); 1624 __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
1625 __ jmp(&unpack_exponent, Label::kNear); 1625 __ jmp(&unpack_exponent, Label::kNear);
1626 1626
1627 __ bind(&base_is_smi); 1627 __ bind(&base_is_smi);
1628 __ SmiToInteger32(base, base); 1628 __ SmiToInteger32(base, base);
1629 __ cvtlsi2sd(double_base, base); 1629 __ Cvtlsi2sd(double_base, base);
1630 __ bind(&unpack_exponent); 1630 __ bind(&unpack_exponent);
1631 1631
1632 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); 1632 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
1633 __ SmiToInteger32(exponent, exponent); 1633 __ SmiToInteger32(exponent, exponent);
1634 __ jmp(&int_exponent); 1634 __ jmp(&int_exponent);
1635 1635
1636 __ bind(&exponent_not_smi); 1636 __ bind(&exponent_not_smi);
1637 __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset), 1637 __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
1638 Heap::kHeapNumberMapRootIndex); 1638 Heap::kHeapNumberMapRootIndex);
1639 __ j(not_equal, &call_runtime); 1639 __ j(not_equal, &call_runtime);
1640 __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset)); 1640 __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
1641 } else if (exponent_type_ == TAGGED) { 1641 } else if (exponent_type_ == TAGGED) {
1642 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear); 1642 __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
1643 __ SmiToInteger32(exponent, exponent); 1643 __ SmiToInteger32(exponent, exponent);
1644 __ jmp(&int_exponent); 1644 __ jmp(&int_exponent);
1645 1645
1646 __ bind(&exponent_not_smi); 1646 __ bind(&exponent_not_smi);
1647 __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset)); 1647 __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
1648 } 1648 }
1649 1649
1650 if (exponent_type_ != INTEGER) { 1650 if (exponent_type_ != INTEGER) {
1651 Label fast_power; 1651 Label fast_power;
1652 // Detect integer exponents stored as double. 1652 // Detect integer exponents stored as double.
1653 __ cvttsd2si(exponent, double_exponent); 1653 __ cvttsd2si(exponent, double_exponent);
1654 // Skip to runtime if possibly NaN (indicated by the indefinite integer). 1654 // Skip to runtime if possibly NaN (indicated by the indefinite integer).
1655 __ cmpl(exponent, Immediate(0x80000000u)); 1655 __ cmpl(exponent, Immediate(0x80000000u));
1656 __ j(equal, &call_runtime); 1656 __ j(equal, &call_runtime);
1657 __ cvtlsi2sd(double_scratch, exponent); 1657 __ Cvtlsi2sd(double_scratch, exponent);
1658 // Already ruled out NaNs for exponent. 1658 // Already ruled out NaNs for exponent.
1659 __ ucomisd(double_exponent, double_scratch); 1659 __ ucomisd(double_exponent, double_scratch);
1660 __ j(equal, &int_exponent); 1660 __ j(equal, &int_exponent);
1661 1661
1662 if (exponent_type_ == ON_STACK) { 1662 if (exponent_type_ == ON_STACK) {
1663 // Detect square root case. Crankshaft detects constant +/-0.5 at 1663 // Detect square root case. Crankshaft detects constant +/-0.5 at
1664 // compile time and uses DoMathPowHalf instead. We then skip this check 1664 // compile time and uses DoMathPowHalf instead. We then skip this check
1665 // for non-constant cases of +/-0.5 as these hardly occur. 1665 // for non-constant cases of +/-0.5 as these hardly occur.
1666 Label continue_sqrt, continue_rsqrt, not_plus_half; 1666 Label continue_sqrt, continue_rsqrt, not_plus_half;
1667 // Test for 0.5. 1667 // Test for 0.5.
(...skipping 139 matching lines...) Expand 10 before | Expand all | Expand 10 after
1807 __ divsd(double_scratch2, double_result); 1807 __ divsd(double_scratch2, double_result);
1808 __ movsd(double_result, double_scratch2); 1808 __ movsd(double_result, double_scratch2);
1809 // Test whether result is zero. Bail out to check for subnormal result. 1809 // Test whether result is zero. Bail out to check for subnormal result.
1810 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases. 1810 // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
1811 __ xorps(double_scratch2, double_scratch2); 1811 __ xorps(double_scratch2, double_scratch2);
1812 __ ucomisd(double_scratch2, double_result); 1812 __ ucomisd(double_scratch2, double_result);
1813 // double_exponent aliased as double_scratch2 has already been overwritten 1813 // double_exponent aliased as double_scratch2 has already been overwritten
1814 // and may not have contained the exponent value in the first place when the 1814 // and may not have contained the exponent value in the first place when the
1815 // input was a smi. We reset it with exponent value before bailing out. 1815 // input was a smi. We reset it with exponent value before bailing out.
1816 __ j(not_equal, &done); 1816 __ j(not_equal, &done);
1817 __ cvtlsi2sd(double_exponent, exponent); 1817 __ Cvtlsi2sd(double_exponent, exponent);
1818 1818
1819 // Returning or bailing out. 1819 // Returning or bailing out.
1820 Counters* counters = masm->isolate()->counters(); 1820 Counters* counters = masm->isolate()->counters();
1821 if (exponent_type_ == ON_STACK) { 1821 if (exponent_type_ == ON_STACK) {
1822 // The arguments are still on the stack. 1822 // The arguments are still on the stack.
1823 __ bind(&call_runtime); 1823 __ bind(&call_runtime);
1824 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1); 1824 __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);
1825 1825
1826 // The stub is called from non-optimized code, which expects the result 1826 // The stub is called from non-optimized code, which expects the result
1827 // as heap number in rax. 1827 // as heap number in rax.
(...skipping 3557 matching lines...) Expand 10 before | Expand all | Expand 10 after
5385 5385
5386 // Load left and right operand. 5386 // Load left and right operand.
5387 Label done, left, left_smi, right_smi; 5387 Label done, left, left_smi, right_smi;
5388 __ JumpIfSmi(rax, &right_smi, Label::kNear); 5388 __ JumpIfSmi(rax, &right_smi, Label::kNear);
5389 __ CompareMap(rax, masm->isolate()->factory()->heap_number_map(), NULL); 5389 __ CompareMap(rax, masm->isolate()->factory()->heap_number_map(), NULL);
5390 __ j(not_equal, &maybe_undefined1, Label::kNear); 5390 __ j(not_equal, &maybe_undefined1, Label::kNear);
5391 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); 5391 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
5392 __ jmp(&left, Label::kNear); 5392 __ jmp(&left, Label::kNear);
5393 __ bind(&right_smi); 5393 __ bind(&right_smi);
5394 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet. 5394 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet.
5395 __ cvtlsi2sd(xmm1, rcx); 5395 __ Cvtlsi2sd(xmm1, rcx);
5396 5396
5397 __ bind(&left); 5397 __ bind(&left);
5398 __ JumpIfSmi(rdx, &left_smi, Label::kNear); 5398 __ JumpIfSmi(rdx, &left_smi, Label::kNear);
5399 __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map(), NULL); 5399 __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map(), NULL);
5400 __ j(not_equal, &maybe_undefined2, Label::kNear); 5400 __ j(not_equal, &maybe_undefined2, Label::kNear);
5401 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset)); 5401 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
5402 __ jmp(&done); 5402 __ jmp(&done);
5403 __ bind(&left_smi); 5403 __ bind(&left_smi);
5404 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet. 5404 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet.
5405 __ cvtlsi2sd(xmm0, rcx); 5405 __ Cvtlsi2sd(xmm0, rcx);
5406 5406
5407 __ bind(&done); 5407 __ bind(&done);
5408 // Compare operands 5408 // Compare operands
5409 __ ucomisd(xmm0, xmm1); 5409 __ ucomisd(xmm0, xmm1);
5410 5410
5411 // Don't base result on EFLAGS when a NaN is involved. 5411 // Don't base result on EFLAGS when a NaN is involved.
5412 __ j(parity_even, &unordered, Label::kNear); 5412 __ j(parity_even, &unordered, Label::kNear);
5413 5413
5414 // Return a result of -1, 0, or 1, based on EFLAGS. 5414 // Return a result of -1, 0, or 1, based on EFLAGS.
5415 // Performing mov, because xor would destroy the flag register. 5415 // Performing mov, because xor would destroy the flag register.
(...skipping 1207 matching lines...) Expand 10 before | Expand all | Expand 10 after
6623 __ bind(&fast_elements_case); 6623 __ bind(&fast_elements_case);
6624 GenerateCase(masm, FAST_ELEMENTS); 6624 GenerateCase(masm, FAST_ELEMENTS);
6625 } 6625 }
6626 6626
6627 6627
6628 #undef __ 6628 #undef __
6629 6629
6630 } } // namespace v8::internal 6630 } } // namespace v8::internal
6631 6631
6632 #endif // V8_TARGET_ARCH_X64 6632 #endif // V8_TARGET_ARCH_X64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698