Chromium Code Reviews

Unified Diff: src/ia32/macro-assembler-ia32.cc

Issue 2035413003: Revert of Provide a tagged allocation top pointer. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 6 months ago
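
A note on what this revert changes: in the old (left/`-`) code the allocation top pointer is kept tagged, so Allocate/FastAllocate hand back an already-tagged result; the new (right/`+`) code reverts to an untagged top and re-tags each result explicitly (DCHECK(kHeapObjectTag == 1); inc(result)), which is also why the debug check in UpdateAllocationTopHelper flips from Check(not_zero, ...) back to Check(zero, ...). The stand-alone C++ sketch below is illustration only, not V8 code; the function names and the byte-array heap are invented for the example.

#include <cstdint>
#include <cstdio>

constexpr uintptr_t kHeapObjectTag = 1;        // low bit set on HeapObject pointers
constexpr uintptr_t kObjectAlignmentMask = 3;  // 4-byte object alignment on ia32

// New (reverted) convention: the stored top is an untagged, aligned address,
// so the allocation result has to be tagged explicitly (the added inc(result)).
uintptr_t AllocateWithUntaggedTop(uintptr_t* top, uintptr_t object_size) {
  uintptr_t result = *top;
  *top = result + object_size;     // new top stays aligned
  return result + kHeapObjectTag;  // tag the returned pointer
}

// Old convention being reverted: the stored top already carries the tag,
// so the result comes back tagged for free and no inc(result) is needed.
uintptr_t AllocateWithTaggedTop(uintptr_t* top, uintptr_t object_size) {
  uintptr_t result = *top;
  *top = result + object_size;  // new top keeps the tag bit set
  return result;
}

int main() {
  alignas(8) static uint8_t heap[64];
  uintptr_t untagged_top = reinterpret_cast<uintptr_t>(heap);
  uintptr_t tagged_top = untagged_top + kHeapObjectTag;

  uintptr_t a = AllocateWithUntaggedTop(&untagged_top, 16);
  uintptr_t b = AllocateWithTaggedTop(&tagged_top, 16);
  printf("same tagged result: %d\n", static_cast<int>(a == b));

  // The stored top is aligned in one scheme and tagged in the other, which is
  // why UpdateAllocationTopHelper's debug check flips between Check(zero, ...)
  // and Check(not_zero, ...).
  printf("untagged top low bits: %u\n",
         static_cast<unsigned>(untagged_top & kObjectAlignmentMask));
  printf("tagged top low bits:   %u\n",
         static_cast<unsigned>(tagged_top & kObjectAlignmentMask));
  return 0;
}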
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #if V8_TARGET_ARCH_IA32

 #include "src/base/bits.h"
 #include "src/base/division-by-constant.h"
 #include "src/bootstrapper.h"
 #include "src/codegen.h"
(...skipping 1486 matching lines...)
     mov(result, Operand(scratch, 0));
   }
 }


 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                                Register scratch,
                                                AllocationFlags flags) {
   if (emit_debug_code()) {
     test(result_end, Immediate(kObjectAlignmentMask));
-    Check(not_zero, kUnalignedAllocationInNewSpace);
+    Check(zero, kUnalignedAllocationInNewSpace);
   }

   ExternalReference allocation_top =
       AllocationUtils::GetAllocationTopReference(isolate(), flags);

   // Update new top. Use scratch if available.
   if (scratch.is(no_reg)) {
     mov(Operand::StaticVariable(allocation_top), result_end);
   } else {
     mov(Operand(scratch, 0), result_end);
(...skipping 30 matching lines...)
   LoadAllocationTopHelper(result, scratch, flags);

   ExternalReference allocation_limit =
       AllocationUtils::GetAllocationLimitReference(isolate(), flags);

   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
-    test(result, Immediate(kDoubleAlignmentMaskTagged));
+    test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
     if ((flags & PRETENURE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
     mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
     bind(&aligned);
   }

   // Calculate new top and bail out if space is exhausted.
   Register top_reg = result_end.is_valid() ? result_end : result;

   if (!top_reg.is(result)) {
     mov(top_reg, result);
   }
   add(top_reg, Immediate(object_size));
   cmp(top_reg, Operand::StaticVariable(allocation_limit));
   j(above, gc_required);

   if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
     // The top pointer is not updated for allocation folding dominators.
     UpdateAllocationTopHelper(top_reg, scratch, flags);
   }

   if (top_reg.is(result)) {
-    sub(result, Immediate(object_size));
+    sub(result, Immediate(object_size - kHeapObjectTag));
+  } else {
+    // Tag the result.
+    DCHECK(kHeapObjectTag == 1);
+    inc(result);
   }
 }


 void MacroAssembler::Allocate(int header_size,
                               ScaleFactor element_size,
                               Register element_count,
                               RegisterValueType element_count_type,
                               Register result,
                               Register result_end,
(...skipping 22 matching lines...)
   LoadAllocationTopHelper(result, scratch, flags);

   ExternalReference allocation_limit =
       AllocationUtils::GetAllocationLimitReference(isolate(), flags);

   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
-    test(result, Immediate(kDoubleAlignmentMaskTagged));
+    test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
     if ((flags & PRETENURE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
-    mov(Operand(result, -kHeapObjectTag),
+    mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
     bind(&aligned);
   }

   // Calculate new top and bail out if space is exhausted.
   // We assume that element_count*element_size + header_size does not
   // overflow.
   if (element_count_type == REGISTER_VALUE_IS_SMI) {
     STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
     STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
     STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
     DCHECK(element_size >= times_2);
     DCHECK(kSmiTagSize == 1);
     element_size = static_cast<ScaleFactor>(element_size - 1);
   } else {
     DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
   }

   lea(result_end, Operand(element_count, element_size, header_size));
   add(result_end, result);
   cmp(result_end, Operand::StaticVariable(allocation_limit));
   j(above, gc_required);

+  // Tag result.
+  DCHECK(kHeapObjectTag == 1);
+  inc(result);
+
   UpdateAllocationTopHelper(result_end, scratch, flags);
 }


 void MacroAssembler::Allocate(Register object_size,
                               Register result,
                               Register result_end,
                               Register scratch,
                               Label* gc_required,
                               AllocationFlags flags) {
(...skipping 18 matching lines...)
   LoadAllocationTopHelper(result, scratch, flags);

   ExternalReference allocation_limit =
       AllocationUtils::GetAllocationLimitReference(isolate(), flags);

   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
-    test(result, Immediate(kDoubleAlignmentMaskTagged));
+    test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
     if ((flags & PRETENURE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
-    mov(Operand(result, -kHeapObjectTag),
+    mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
     bind(&aligned);
   }

   // Calculate new top and bail out if space is exhausted.
   if (!object_size.is(result_end)) {
     mov(result_end, object_size);
   }
   add(result_end, result);
   cmp(result_end, Operand::StaticVariable(allocation_limit));
   j(above, gc_required);

+  // Tag result.
+  DCHECK(kHeapObjectTag == 1);
+  inc(result);
+
   if ((flags & ALLOCATION_FOLDING_DOMINATOR) == 0) {
     // The top pointer is not updated for allocation folding dominators.
     UpdateAllocationTopHelper(result_end, scratch, flags);
   }
 }

 void MacroAssembler::FastAllocate(int object_size, Register result,
                                   Register result_end, AllocationFlags flags) {
   DCHECK(!result.is(result_end));
   // Load address of new object into result.
   LoadAllocationTopHelper(result, no_reg, flags);

   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
-    test(result, Immediate(kDoubleAlignmentMaskTagged));
+    test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    mov(Operand(result, -kHeapObjectTag),
+    mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
     bind(&aligned);
   }

   lea(result_end, Operand(result, object_size));
   UpdateAllocationTopHelper(result_end, no_reg, flags);
+
+  DCHECK(kHeapObjectTag == 1);
+  inc(result);
 }

 void MacroAssembler::FastAllocate(Register object_size, Register result,
                                   Register result_end, AllocationFlags flags) {
   DCHECK(!result.is(result_end));
   // Load address of new object into result.
   LoadAllocationTopHelper(result, no_reg, flags);

   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
-    test(result, Immediate(kDoubleAlignmentMaskTagged));
+    test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    mov(Operand(result, -kHeapObjectTag),
+    mov(Operand(result, 0),
         Immediate(isolate()->factory()->one_pointer_filler_map()));
     add(result, Immediate(kDoubleSize / 2));
     bind(&aligned);
   }

   lea(result_end, Operand(result, object_size, times_1, 0));
   UpdateAllocationTopHelper(result_end, no_reg, flags);
+
+  DCHECK(kHeapObjectTag == 1);
+  inc(result);
 }


 void MacroAssembler::AllocateHeapNumber(Register result,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required,
                                         MutableMode mode) {
   // Allocate heap number in new space.
   Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
(...skipping 1663 matching lines...)
   mov(eax, dividend);
   shr(eax, 31);
   add(edx, eax);
 }


 }  // namespace internal
 }  // namespace v8

 #endif  // V8_TARGET_ARCH_IA32
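
A second note, on the DOUBLE_ALIGNMENT blocks in the diff above: both sides perform the same alignment fixup and differ only in the mask they test and the address at which the filler word is stored. If the top is merely pointer-aligned, a one-word filler is written there and the top advances by kDoubleSize / 2 so the object itself starts on an 8-byte boundary. The sketch below is a minimal stand-alone illustration, not V8 code; kOnePointerFillerMarker is an invented stand-in for the real one-pointer filler map.

#include <cstdint>
#include <cstdio>

constexpr uintptr_t kPointerSize = 4;  // ia32 pointer size
constexpr uintptr_t kDoubleSize = 8;
constexpr uintptr_t kDoubleAlignmentMask = kDoubleSize - 1;
// Stand-in for Immediate(isolate()->factory()->one_pointer_filler_map()).
constexpr uint32_t kOnePointerFillerMarker = 0xfeedbeef;

// Mirrors the test / mov / add sequence in the DOUBLE_ALIGNMENT blocks:
// if 'top' is not 8-byte aligned, plant a one-word filler and skip over it.
uintptr_t AlignForDouble(uintptr_t top) {
  if (top & kDoubleAlignmentMask) {
    *reinterpret_cast<uint32_t*>(top) = kOnePointerFillerMarker;
    top += kDoubleSize / 2;
  }
  return top;
}

int main() {
  alignas(8) static uint32_t space[4] = {};
  uintptr_t base = reinterpret_cast<uintptr_t>(space);
  // Already 8-byte aligned: nothing written, top unchanged.
  printf("aligned start:   +0 -> +%u\n",
         static_cast<unsigned>(AlignForDouble(base) - base));
  // Only 4-byte aligned: a filler word is written and the top advances by 4.
  printf("unaligned start: +4 -> +%u\n",
         static_cast<unsigned>(AlignForDouble(base + kPointerSize) - base));
  return 0;
}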