Chromium Code Reviews
Diff: src/arm/macro-assembler-arm.cc

Issue 12440041: Generalizing remaining Allocate functions in the macro assemblers used in pretenuring. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 9 months ago
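
The allocation behaviour in this file is steered by AllocationFlags bits. As a reading aid, here is a rough sketch of the flags this diff touches; the authoritative enum lives in src/macro-assembler.h, and the bit positions below are placeholders, not taken from this patch:

// Sketch only: the AllocationFlags values referenced in this diff.
// Exact bit assignments and any additional members come from
// src/macro-assembler.h, not from this patch.
enum AllocationFlags {
  NO_ALLOCATION_FLAGS = 0,
  TAG_OBJECT = 1 << 0,                  // Return the result tagged as a heap object.
  RESULT_CONTAINS_TOP = 1 << 1,         // result already holds the allocation top.
  DOUBLE_ALIGNMENT = 1 << 2,            // Align the allocation to kDoubleSize.
  PRETENURE_OLD_POINTER_SPACE = 1 << 3  // Allocate directly in old pointer space.
};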
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 1653 matching lines...)
   b(hi, gc_required);
   str(scratch2, MemOperand(topaddr));
 
   // Tag object if requested.
   if ((flags & TAG_OBJECT) != 0) {
     add(result, result, Operand(kHeapObjectTag));
   }
 }
 
 
-void MacroAssembler::AllocateInNewSpace(Register object_size,
-                                        Register result,
-                                        Register scratch1,
-                                        Register scratch2,
-                                        Label* gc_required,
-                                        AllocationFlags flags) {
-  ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
+void MacroAssembler::Allocate(Register object_size,
+                              Register result,
+                              Register scratch1,
+                              Register scratch2,
+                              Label* gc_required,
+                              AllocationFlags flags) {
   if (!FLAG_inline_new) {
     if (emit_debug_code()) {
       // Trash the registers to simulate an allocation failure.
       mov(result, Operand(0x7091));
       mov(scratch1, Operand(0x7191));
       mov(scratch2, Operand(0x7291));
     }
     jmp(gc_required);
     return;
   }
 
   // Assert that the register arguments are different and that none of
   // them are ip. ip is used explicitly in the code generated below.
   ASSERT(!result.is(scratch1));
   ASSERT(!result.is(scratch2));
   ASSERT(!scratch1.is(scratch2));
   ASSERT(!object_size.is(ip));
   ASSERT(!result.is(ip));
   ASSERT(!scratch1.is(ip));
   ASSERT(!scratch2.is(ip));
 
   // Check relative positions of allocation top and limit addresses.
   // The values must be adjacent in memory to allow the use of LDM.
   // Also, assert that the registers are numbered such that the values
   // are loaded in the correct order.
-  ExternalReference new_space_allocation_top =
-      ExternalReference::new_space_allocation_top_address(isolate());
-  ExternalReference new_space_allocation_limit =
-      ExternalReference::new_space_allocation_limit_address(isolate());
+  ExternalReference allocation_top =
+      AllocationUtils::GetAllocationTopReference(isolate(), flags);
+  ExternalReference allocation_limit =
+      AllocationUtils::GetAllocationLimitReference(isolate(), flags);
   intptr_t top =
-      reinterpret_cast<intptr_t>(new_space_allocation_top.address());
+      reinterpret_cast<intptr_t>(allocation_top.address());
   intptr_t limit =
-      reinterpret_cast<intptr_t>(new_space_allocation_limit.address());
+      reinterpret_cast<intptr_t>(allocation_limit.address());
   ASSERT((limit - top) == kPointerSize);
   ASSERT(result.code() < ip.code());
 
   // Set up allocation top address.
   Register topaddr = scratch1;
-  mov(topaddr, Operand(new_space_allocation_top));
+  mov(topaddr, Operand(allocation_top));
 
   // This code stores a temporary value in ip. This is OK, as the code below
   // does not need ip for implicit literal generation.
   if ((flags & RESULT_CONTAINS_TOP) == 0) {
     // Load allocation top into result and allocation limit into ip.
     ldm(ia, topaddr, result.bit() | ip.bit());
   } else {
     if (emit_debug_code()) {
       // Assert that result actually contains top on entry. ip is used
       // immediately below so this use of ip does not cause difference with
       // respect to register content between debug and release mode.
       ldr(ip, MemOperand(topaddr));
       cmp(result, ip);
       Check(eq, "Unexpected allocation top");
     }
     // Load allocation limit into ip. Result already contains allocation top.
     ldr(ip, MemOperand(topaddr, limit - top));
   }
 
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // always safe because the limit of the heap is always aligned.
+    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
     Label aligned;
     b(eq, &aligned);
     mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
     str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex));
     bind(&aligned);
   }
 
   // Calculate new top and bail out if new space is exhausted. Use result
(...skipping 50 matching lines...)
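
Note: the AllocationUtils helpers introduced above are not defined in this file. A minimal sketch of what they presumably do, assuming ExternalReference exposes old-pointer-space top/limit accessors alongside the existing new-space ones (the accessor names are assumptions, not taken from this patch):

// Sketch only (assumed shape, not the authoritative definition): pick the
// allocation top/limit external references for either new space or old
// pointer space, depending on the PRETENURE_OLD_POINTER_SPACE flag.
class AllocationUtils {
 public:
  static ExternalReference GetAllocationTopReference(Isolate* isolate,
                                                     AllocationFlags flags) {
    return ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) ?
        ExternalReference::old_pointer_space_allocation_top_address(isolate) :
        ExternalReference::new_space_allocation_top_address(isolate);
  }

  static ExternalReference GetAllocationLimitReference(Isolate* isolate,
                                                       AllocationFlags flags) {
    return ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) ?
        ExternalReference::old_pointer_space_allocation_limit_address(isolate) :
        ExternalReference::new_space_allocation_limit_address(isolate);
  }
};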
                                            Label* gc_required) {
   // Calculate the number of bytes needed for the characters in the string while
   // observing object alignment.
   ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
   mov(scratch1, Operand(length, LSL, 1));  // Length in bytes, not chars.
   add(scratch1, scratch1,
       Operand(kObjectAlignmentMask + SeqTwoByteString::kHeaderSize));
   and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));
 
   // Allocate two-byte string in new space.
-  AllocateInNewSpace(scratch1,
-                     result,
-                     scratch2,
-                     scratch3,
-                     gc_required,
-                     TAG_OBJECT);
+  Allocate(scratch1,
+           result,
+           scratch2,
+           scratch3,
+           gc_required,
+           TAG_OBJECT);
 
   // Set the map, length and hash field.
   InitializeNewString(result,
                       length,
                       Heap::kStringMapRootIndex,
                       scratch1,
                       scratch2);
 }
 
 
 void MacroAssembler::AllocateAsciiString(Register result,
                                          Register length,
                                          Register scratch1,
                                          Register scratch2,
                                          Register scratch3,
                                          Label* gc_required) {
   // Calculate the number of bytes needed for the characters in the string while
   // observing object alignment.
   ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
   ASSERT(kCharSize == 1);
   add(scratch1, length,
       Operand(kObjectAlignmentMask + SeqOneByteString::kHeaderSize));
   and_(scratch1, scratch1, Operand(~kObjectAlignmentMask));
 
   // Allocate ASCII string in new space.
-  AllocateInNewSpace(scratch1,
-                     result,
-                     scratch2,
-                     scratch3,
-                     gc_required,
-                     TAG_OBJECT);
+  Allocate(scratch1,
+           result,
+           scratch2,
+           scratch3,
+           gc_required,
+           TAG_OBJECT);
 
   // Set the map, length and hash field.
   InitializeNewString(result,
                       length,
                       Heap::kAsciiStringMapRootIndex,
                       scratch1,
                       scratch2);
 }
 
 
(...skipping 2090 matching lines...)
 void CodePatcher::EmitCondition(Condition cond) {
   Instr instr = Assembler::instr_at(masm_.pc_);
   instr = (instr & ~kCondMask) | cond;
   masm_.emit(instr);
 }
 
 
 } }  // namespace v8::internal
 
 #endif  // V8_TARGET_ARCH_ARM