OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1497 matching lines...)
1508 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { | 1508 void MacroAssembler::SmiAddConstant(const Operand& dst, Smi* constant) { |
1509 if (constant->value() != 0) { | 1509 if (constant->value() != 0) { |
1510 addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value())); | 1510 addl(Operand(dst, kSmiShift / kBitsPerByte), Immediate(constant->value())); |
1511 } | 1511 } |
1512 } | 1512 } |
1513 | 1513 |
1514 | 1514 |
1515 void MacroAssembler::SmiAddConstant(Register dst, | 1515 void MacroAssembler::SmiAddConstant(Register dst, |
1516 Register src, | 1516 Register src, |
1517 Smi* constant, | 1517 Smi* constant, |
1518 Label* on_not_smi_result, | 1518 SmiOperationExecutionMode mode, |
| 1519 Label* bailout_label, |
1519 Label::Distance near_jump) { | 1520 Label::Distance near_jump) { |
1520 if (constant->value() == 0) { | 1521 if (constant->value() == 0) { |
1521 if (!dst.is(src)) { | 1522 if (!dst.is(src)) { |
1522 movq(dst, src); | 1523 movq(dst, src); |
1523 } | 1524 } |
1524 } else if (dst.is(src)) { | 1525 } else if (dst.is(src)) { |
1525 ASSERT(!dst.is(kScratchRegister)); | 1526 ASSERT(!dst.is(kScratchRegister)); |
1526 | |
1527 Label done; | |
1528 LoadSmiConstant(kScratchRegister, constant); | 1527 LoadSmiConstant(kScratchRegister, constant); |
1529 addq(dst, kScratchRegister); | 1528 addq(dst, kScratchRegister); |
1530 j(no_overflow, &done, Label::kNear); | 1529 if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) { |
1531 // Restore src. | 1530 j(no_overflow, bailout_label, near_jump); |
1532 subq(dst, kScratchRegister); | 1531 ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER)); |
1533 jmp(on_not_smi_result, near_jump); | 1532 subq(dst, kScratchRegister); |
1534 bind(&done); | 1533 } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) { |
| 1534 if (mode.Contains(PRESERVE_SOURCE_REGISTER)) { |
| 1535 Label done; |
| 1536 j(no_overflow, &done, Label::kNear); |
| 1537 subq(dst, kScratchRegister); |
| 1538 jmp(bailout_label, near_jump); |
| 1539 bind(&done); |
| 1540 } else { |
| 1541 // Bail out on overflow without preserving src. |
| 1542 j(overflow, bailout_label, near_jump); |
| 1543 } |
| 1544 } else { |
| 1545 CHECK(mode.IsEmpty()); |
| 1546 } |
1535 } else { | 1547 } else { |
| 1548 ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER)); |
| 1549 ASSERT(mode.Contains(BAILOUT_ON_OVERFLOW)); |
1536 LoadSmiConstant(dst, constant); | 1550 LoadSmiConstant(dst, constant); |
1537 addq(dst, src); | 1551 addq(dst, src); |
1538 j(overflow, on_not_smi_result, near_jump); | 1552 j(overflow, bailout_label, near_jump); |
1539 } | 1553 } |
1540 } | 1554 } |
1541 | 1555 |
1542 | 1556 |
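For readers skimming this CL: the new SmiOperationExecutionMode argument is treated above as a small flag set that answers Contains() and IsEmpty() queries over PRESERVE_SOURCE_REGISTER, BAILOUT_ON_OVERFLOW and BAILOUT_ON_NO_OVERFLOW. A minimal sketch of such a flag set follows; the bit layout, the explicit int constructor and the main() driver are illustrative assumptions, not the actual V8 declaration.

// Sketch only: approximates a Contains()/IsEmpty() flag set like the one the
// new SmiAddConstant/SmiSubConstant overloads query. Not the V8 definition.
#include <cassert>

enum SmiOperationConstraint {
  PRESERVE_SOURCE_REGISTER = 1 << 0,
  BAILOUT_ON_NO_OVERFLOW   = 1 << 1,
  BAILOUT_ON_OVERFLOW      = 1 << 2
};

class SmiOperationExecutionMode {
 public:
  SmiOperationExecutionMode() : bits_(0) {}
  explicit SmiOperationExecutionMode(int bits) : bits_(bits) {}
  bool Contains(SmiOperationConstraint flag) const { return (bits_ & flag) != 0; }
  bool IsEmpty() const { return bits_ == 0; }
 private:
  int bits_;
};

int main() {
  // The combination the dst != src path above asserts on: keep the source
  // register intact and bail out when the addition overflows.
  SmiOperationExecutionMode mode(PRESERVE_SOURCE_REGISTER | BAILOUT_ON_OVERFLOW);
  assert(mode.Contains(PRESERVE_SOURCE_REGISTER));
  assert(mode.Contains(BAILOUT_ON_OVERFLOW));
  assert(!mode.Contains(BAILOUT_ON_NO_OVERFLOW));
  return 0;
}

Packing the constraints into one mode value keeps the macro-assembler signature stable while letting each call site decide whether the source register must survive a bailout.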
1543 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) { | 1557 void MacroAssembler::SmiSubConstant(Register dst, Register src, Smi* constant) { |
1544 if (constant->value() == 0) { | 1558 if (constant->value() == 0) { |
1545 if (!dst.is(src)) { | 1559 if (!dst.is(src)) { |
1546 movq(dst, src); | 1560 movq(dst, src); |
1547 } | 1561 } |
1548 } else if (dst.is(src)) { | 1562 } else if (dst.is(src)) { |
(...skipping 11 matching lines...)
1560 LoadSmiConstant(dst, Smi::FromInt(-constant->value())); | 1574 LoadSmiConstant(dst, Smi::FromInt(-constant->value())); |
1561 addq(dst, src); | 1575 addq(dst, src); |
1562 } | 1576 } |
1563 } | 1577 } |
1564 } | 1578 } |
1565 | 1579 |
1566 | 1580 |
1567 void MacroAssembler::SmiSubConstant(Register dst, | 1581 void MacroAssembler::SmiSubConstant(Register dst, |
1568 Register src, | 1582 Register src, |
1569 Smi* constant, | 1583 Smi* constant, |
1570 Label* on_not_smi_result, | 1584 SmiOperationExecutionMode mode, |
| 1585 Label* bailout_label, |
1571 Label::Distance near_jump) { | 1586 Label::Distance near_jump) { |
1572 if (constant->value() == 0) { | 1587 if (constant->value() == 0) { |
1573 if (!dst.is(src)) { | 1588 if (!dst.is(src)) { |
1574 movq(dst, src); | 1589 movq(dst, src); |
1575 } | 1590 } |
1576 } else if (dst.is(src)) { | 1591 } else if (dst.is(src)) { |
1577 ASSERT(!dst.is(kScratchRegister)); | 1592 ASSERT(!dst.is(kScratchRegister)); |
| 1593 LoadSmiConstant(kScratchRegister, constant); |
| 1594 subq(dst, kScratchRegister); |
| 1595 if (mode.Contains(BAILOUT_ON_NO_OVERFLOW)) { |
| 1596 j(no_overflow, bailout_label, near_jump); |
| 1597 ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER)); |
| 1598 addq(dst, kScratchRegister); |
| 1599 } else if (mode.Contains(BAILOUT_ON_OVERFLOW)) { |
| 1600 if (mode.Contains(PRESERVE_SOURCE_REGISTER)) { |
| 1601 Label done; |
| 1602 j(no_overflow, &done, Label::kNear); |
| 1603 addq(dst, kScratchRegister); |
| 1604 jmp(bailout_label, near_jump); |
| 1605 bind(&done); |
| 1606 } else { |
| 1607 // Bail out on overflow without preserving src. |
| 1608 j(overflow, bailout_label, near_jump); |
| 1609 } |
| 1610 } else { |
| 1611 CHECK(mode.IsEmpty()); |
| 1612 } |
| 1613 } else { |
| 1614 ASSERT(mode.Contains(PRESERVE_SOURCE_REGISTER)); |
| 1615 ASSERT(mode.Contains(BAILOUT_ON_OVERFLOW)); |
1578 if (constant->value() == Smi::kMinValue) { | 1616 if (constant->value() == Smi::kMinValue) { |
1579 // Subtracting min-value from any non-negative value will overflow. | 1617 ASSERT(!dst.is(kScratchRegister)); |
1580 // We test the non-negativeness before doing the subtraction. | 1618 movq(dst, src); |
1581 testq(src, src); | |
1582 j(not_sign, on_not_smi_result, near_jump); | |
1583 LoadSmiConstant(kScratchRegister, constant); | 1619 LoadSmiConstant(kScratchRegister, constant); |
1584 subq(dst, kScratchRegister); | 1620 subq(dst, kScratchRegister); |
1585 } else { | 1621 j(overflow, bailout_label, near_jump); |
1586 // Subtract by adding the negation. | |
1587 LoadSmiConstant(kScratchRegister, Smi::FromInt(-constant->value())); | |
1588 addq(kScratchRegister, dst); | |
1589 j(overflow, on_not_smi_result, near_jump); | |
1590 movq(dst, kScratchRegister); | |
1591 } | |
1592 } else { | |
1593 if (constant->value() == Smi::kMinValue) { | |
1594 // Subtracting min-value from any non-negative value will overflow. | |
1595 // We test the non-negativeness before doing the subtraction. | |
1596 testq(src, src); | |
1597 j(not_sign, on_not_smi_result, near_jump); | |
1598 LoadSmiConstant(dst, constant); | |
1599 // Adding and subtracting the min-value gives the same result, it only | |
1600 // differs on the overflow bit, which we don't check here. | |
1601 addq(dst, src); | |
1602 } else { | 1622 } else { |
1603 // Subtract by adding the negation. | 1623 // Subtract by adding the negation. |
1604 LoadSmiConstant(dst, Smi::FromInt(-(constant->value()))); | 1624 LoadSmiConstant(dst, Smi::FromInt(-(constant->value()))); |
1605 addq(dst, src); | 1625 addq(dst, src); |
1606 j(overflow, on_not_smi_result, near_jump); | 1626 j(overflow, bailout_label, near_jump); |
1607 } | 1627 } |
1608 } | 1628 } |
1609 } | 1629 } |
1610 | 1630 |
1611 | 1631 |
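As a plain-C++ illustration of what the PRESERVE_SOURCE_REGISTER + BAILOUT_ON_OVERFLOW path above emits for SmiSubConstant: do the subtraction, and if the CPU reports overflow, undo it (the addq of kScratchRegister) before jumping to the bailout label, so the caller still sees the untouched source. The sketch below models that control flow with __builtin_sub_overflow standing in for the overflow flag; the function and variable names are assumptions for illustration, not generated code.

// Models the restore-on-overflow control flow of the new SmiSubConstant path.
#include <cstdint>
#include <cstdio>

// Returns false (the "bailout") on signed overflow, leaving *dst unchanged,
// mirroring PRESERVE_SOURCE_REGISTER; otherwise commits the result.
static bool SubConstantPreservingSource(int64_t* dst, int64_t constant) {
  int64_t result;
  if (__builtin_sub_overflow(*dst, constant, &result)) {
    return false;  // Overflow: *dst still holds the original value.
  }
  *dst = result;
  return true;
}

int main() {
  int64_t value = INT64_MIN + 1;
  if (!SubConstantPreservingSource(&value, 2)) {
    std::printf("bailed out, source preserved: %lld\n", (long long)value);
  }
  return 0;
}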
1612 void MacroAssembler::SmiNeg(Register dst, | 1632 void MacroAssembler::SmiNeg(Register dst, |
1613 Register src, | 1633 Register src, |
1614 Label* on_smi_result, | 1634 Label* on_smi_result, |
1615 Label::Distance near_jump) { | 1635 Label::Distance near_jump) { |
1616 if (dst.is(src)) { | 1636 if (dst.is(src)) { |
(...skipping 3310 matching lines...)
4927 j(greater, &no_memento_available); | 4947 j(greater, &no_memento_available); |
4928 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), | 4948 CompareRoot(MemOperand(scratch_reg, -AllocationMemento::kSize), |
4929 Heap::kAllocationMementoMapRootIndex); | 4949 Heap::kAllocationMementoMapRootIndex); |
4930 bind(&no_memento_available); | 4950 bind(&no_memento_available); |
4931 } | 4951 } |
4932 | 4952 |
4933 | 4953 |
4934 } } // namespace v8::internal | 4954 } } // namespace v8::internal |
4935 | 4955 |
4936 #endif // V8_TARGET_ARCH_X64 | 4956 #endif // V8_TARGET_ARCH_X64 |