OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1546 matching lines...)
1557 tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask))); | 1557 tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask))); |
1558 b(ne, miss); | 1558 b(ne, miss); |
1559 | 1559 |
1560 // Get the value at the masked, scaled index and return. | 1560 // Get the value at the masked, scaled index and return. |
1561 const int kValueOffset = | 1561 const int kValueOffset = |
1562 SeededNumberDictionary::kElementsStartOffset + kPointerSize; | 1562 SeededNumberDictionary::kElementsStartOffset + kPointerSize; |
1563 ldr(result, FieldMemOperand(t2, kValueOffset)); | 1563 ldr(result, FieldMemOperand(t2, kValueOffset)); |
1564 } | 1564 } |
1565 | 1565 |
1566 | 1566 |
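For context on the constant above: kValueOffset falls out of the dictionary's per-entry layout, which (an assumption inferred from the offsets used here; the definition lives in objects.h, outside this diff) is a triple per entry:

// Assumed SeededNumberDictionary entry layout:
//   entry + 0 * kPointerSize -> key
//   entry + 1 * kPointerSize -> value    (kElementsStartOffset + kPointerSize)
//   entry + 2 * kPointerSize -> details  (the PropertyDetails smi tested via t1)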
1567 void MacroAssembler::AllocateInNewSpace(int object_size, | 1567 void MacroAssembler::Allocate(int object_size, |
1568 Register result, | 1568 Register result, |
1569 Register scratch1, | 1569 Register scratch1, |
1570 Register scratch2, | 1570 Register scratch2, |
1571 Label* gc_required, | 1571 Label* gc_required, |
1572 AllocationFlags flags) { | 1572 AllocationFlags flags) { |
1573 if (!FLAG_inline_new) { | 1573 if (!FLAG_inline_new) { |
1574 if (emit_debug_code()) { | 1574 if (emit_debug_code()) { |
1575 // Trash the registers to simulate an allocation failure. | 1575 // Trash the registers to simulate an allocation failure. |
1576 mov(result, Operand(0x7091)); | 1576 mov(result, Operand(0x7091)); |
1577 mov(scratch1, Operand(0x7191)); | 1577 mov(scratch1, Operand(0x7191)); |
1578 mov(scratch2, Operand(0x7291)); | 1578 mov(scratch2, Operand(0x7291)); |
1579 } | 1579 } |
1580 jmp(gc_required); | 1580 jmp(gc_required); |
1581 return; | 1581 return; |
1582 } | 1582 } |
1583 | 1583 |
1584 ASSERT(!result.is(scratch1)); | 1584 ASSERT(!result.is(scratch1)); |
1585 ASSERT(!result.is(scratch2)); | 1585 ASSERT(!result.is(scratch2)); |
1586 ASSERT(!scratch1.is(scratch2)); | 1586 ASSERT(!scratch1.is(scratch2)); |
1587 ASSERT(!scratch1.is(ip)); | 1587 ASSERT(!scratch1.is(ip)); |
1588 ASSERT(!scratch2.is(ip)); | 1588 ASSERT(!scratch2.is(ip)); |
1589 | 1589 |
1590 // Make object size into bytes. | 1590 // Make object size into bytes. |
1591 if ((flags & SIZE_IN_WORDS) != 0) { | 1591 if ((flags & SIZE_IN_WORDS) != 0) { |
1592 object_size *= kPointerSize; | 1592 object_size *= kPointerSize; |
1593 } | 1593 } |
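// Worked example for the SIZE_IN_WORDS path above: object_size == 3 words
// becomes 3 * kPointerSize == 12 bytes on ARM (kPointerSize == 4 assumed),
// and 12 & kObjectAlignmentMask == 0, so the assert below holds.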
1594 ASSERT_EQ(0, object_size & kObjectAlignmentMask); | 1594 ASSERT_EQ(0, object_size & kObjectAlignmentMask); |
1595 | 1595 |
1596 // Check relative positions of allocation top and limit addresses. | 1596 // Check relative positions of allocation top and limit addresses. |
1597 // The values must be adjacent in memory to allow the use of LDM. | 1597 // The values must be adjacent in memory to allow the use of LDM. |
1598 // Also, assert that the registers are numbered such that the values | 1598 // Also, assert that the registers are numbered such that the values |
1599 // are loaded in the correct order. | 1599 // are loaded in the correct order. |
1600 ExternalReference new_space_allocation_top = | 1600 ExternalReference allocation_top = |
1601 ExternalReference::new_space_allocation_top_address(isolate()); | 1601 AllocationUtils::GetAllocationTopReference(isolate(), flags); |
1602 ExternalReference new_space_allocation_limit = | 1602 ExternalReference allocation_limit = |
1603 ExternalReference::new_space_allocation_limit_address(isolate()); | 1603 AllocationUtils::GetAllocationLimitReference(isolate(), flags); |
| 1604 |
1604 intptr_t top = | 1605 intptr_t top = |
1605 reinterpret_cast<intptr_t>(new_space_allocation_top.address()); | 1606 reinterpret_cast<intptr_t>(allocation_top.address()); |
1606 intptr_t limit = | 1607 intptr_t limit = |
1607 reinterpret_cast<intptr_t>(new_space_allocation_limit.address()); | 1608 reinterpret_cast<intptr_t>(allocation_limit.address()); |
1608 ASSERT((limit - top) == kPointerSize); | 1609 ASSERT((limit - top) == kPointerSize); |
1609 ASSERT(result.code() < ip.code()); | 1610 ASSERT(result.code() < ip.code()); |
1610 | 1611 |
1611 // Set up allocation top address and object size registers. | 1612 // Set up allocation top address and object size registers. |
1612 Register topaddr = scratch1; | 1613 Register topaddr = scratch1; |
1613 Register obj_size_reg = scratch2; | 1614 Register obj_size_reg = scratch2; |
1614 mov(topaddr, Operand(new_space_allocation_top)); | 1615 mov(topaddr, Operand(allocation_top)); |
1615 Operand obj_size_operand = Operand(object_size); | 1616 Operand obj_size_operand = Operand(object_size); |
1616 if (!obj_size_operand.is_single_instruction(this)) { | 1617 if (!obj_size_operand.is_single_instruction(this)) { |
1617 // We are about to steal IP, so we need to load this value first. | 1618 // We are about to steal IP, so we need to load this value first. |
1618 mov(obj_size_reg, obj_size_operand); | 1619 mov(obj_size_reg, obj_size_operand); |
1619 } | 1620 } |
1620 | 1621 |
1621 // This code stores a temporary value in ip. This is OK, as the code below | 1622 // This code stores a temporary value in ip. This is OK, as the code below |
1622 // does not need ip for implicit literal generation. | 1623 // does not need ip for implicit literal generation. |
1623 if ((flags & RESULT_CONTAINS_TOP) == 0) { | 1624 if ((flags & RESULT_CONTAINS_TOP) == 0) { |
1624 // Load allocation top into result and allocation limit into ip. | 1625 // Load allocation top into result and allocation limit into ip. |
1625 ldm(ia, topaddr, result.bit() | ip.bit()); | 1626 ldm(ia, topaddr, result.bit() | ip.bit()); |
1626 } else { | 1627 } else { |
1627 if (emit_debug_code()) { | 1628 if (emit_debug_code()) { |
1628 // Assert that result actually contains top on entry. ip is used | 1629 // Assert that result actually contains top on entry. ip is used |
1629 // immediately below, so this use of ip keeps register contents the | 1630 // immediately below, so this use of ip keeps register contents the |
1630 // same in debug and release mode. | 1631 // same in debug and release mode. |
1631 ldr(ip, MemOperand(topaddr)); | 1632 ldr(ip, MemOperand(topaddr)); |
1632 cmp(result, ip); | 1633 cmp(result, ip); |
1633 Check(eq, "Unexpected allocation top"); | 1634 Check(eq, "Unexpected allocation top"); |
1634 } | 1635 } |
1635 // Load allocation limit into ip. Result already contains allocation top. | 1636 // Load allocation limit into ip. Result already contains allocation top. |
1636 ldr(ip, MemOperand(topaddr, limit - top)); | 1637 ldr(ip, MemOperand(topaddr, limit - top)); |
1637 } | 1638 } |
1638 | 1639 |
1639 if ((flags & DOUBLE_ALIGNMENT) != 0) { | 1640 if ((flags & DOUBLE_ALIGNMENT) != 0) { |
1640 // Align the next allocation. Storing the filler map without checking top is | 1641 // Align the next allocation. Storing the filler map without checking top is |
1641 // always safe because the limit of the heap is always aligned. | 1642 // always safe because the limit of the heap is always aligned. |
| 1643 ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0); |
1642 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); | 1644 ASSERT(kPointerAlignment * 2 == kDoubleAlignment); |
1643 and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC); | 1645 and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC); |
1644 Label aligned; | 1646 Label aligned; |
1645 b(eq, &aligned); | 1647 b(eq, &aligned); |
1646 mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); | 1648 mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); |
1647 str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex)); | 1649 str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex)); |
1648 bind(&aligned); | 1650 bind(&aligned); |
1649 } | 1651 } |
1650 | 1652 |
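// Note on the DOUBLE_ALIGNMENT block above, assuming ARM's kPointerSize == 4
// and kDoubleAlignment == 8:
//   top % 8 == 0 -> and_ sets the Z flag and we branch straight to &aligned;
//   top % 8 == 4 -> a one-pointer filler map is stored at top, and the
//                   post-indexed str bumps result by kDoubleSize / 2 == 4.
// The filler word keeps the heap iterable, and the new assert records that
// only new-space allocations may take this path in this patch.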
1651 // Calculate new top and bail out if new space is exhausted. Use result | 1653 // Calculate new top and bail out if new space is exhausted. Use result |
(...skipping 16 matching lines...)
1668 } | 1670 } |
1669 } | 1671 } |
1670 | 1672 |
1671 | 1673 |
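The switch from the new_space_* external references to AllocationUtils is what lets this routine allocate in old pointer space when PRETENURE_OLD_POINTER_SPACE is set. A minimal sketch of that helper, assuming the companion heap.h change in this CL defines old-pointer-space top/limit references alongside the existing new-space ones:

class AllocationUtils {
 public:
  static ExternalReference GetAllocationTopReference(
      Isolate* isolate, AllocationFlags flags) {
    return ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) ?
        ExternalReference::old_pointer_space_allocation_top_address(isolate) :
        ExternalReference::new_space_allocation_top_address(isolate);
  }

  static ExternalReference GetAllocationLimitReference(
      Isolate* isolate, AllocationFlags flags) {
    return ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) ?
        ExternalReference::old_pointer_space_allocation_limit_address(isolate) :
        ExternalReference::new_space_allocation_limit_address(isolate);
  }
};

Only the fixed-size path gains pretenuring here; the register-sized variant below keeps its AllocateInNewSpace name and asserts PRETENURE_OLD_POINTER_SPACE away on entry.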
1672 void MacroAssembler::AllocateInNewSpace(Register object_size, | 1674 void MacroAssembler::AllocateInNewSpace(Register object_size, |
1673 Register result, | 1675 Register result, |
1674 Register scratch1, | 1676 Register scratch1, |
1675 Register scratch2, | 1677 Register scratch2, |
1676 Label* gc_required, | 1678 Label* gc_required, |
1677 AllocationFlags flags) { | 1679 AllocationFlags flags) { |
| 1680 ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0); |
1678 if (!FLAG_inline_new) { | 1681 if (!FLAG_inline_new) { |
1679 if (emit_debug_code()) { | 1682 if (emit_debug_code()) { |
1680 // Trash the registers to simulate an allocation failure. | 1683 // Trash the registers to simulate an allocation failure. |
1681 mov(result, Operand(0x7091)); | 1684 mov(result, Operand(0x7091)); |
1682 mov(scratch1, Operand(0x7191)); | 1685 mov(scratch1, Operand(0x7191)); |
1683 mov(scratch2, Operand(0x7291)); | 1686 mov(scratch2, Operand(0x7291)); |
1684 } | 1687 } |
1685 jmp(gc_required); | 1688 jmp(gc_required); |
1686 return; | 1689 return; |
1687 } | 1690 } |
(...skipping 163 matching lines...)
1851 scratch1, | 1854 scratch1, |
1852 scratch2); | 1855 scratch2); |
1853 } | 1856 } |
1854 | 1857 |
1855 | 1858 |
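Each of the string allocators below pairs Allocate with InitializeNewString. A sketch of that shared helper, assuming the usual ARM implementation (it sits elsewhere in this file and is untouched by this patch):

void MacroAssembler::InitializeNewString(Register string,
                                         Register length,
                                         Heap::RootListIndex map_index,
                                         Register scratch1,
                                         Register scratch2) {
  // Smi-tag the length and fetch the map for the requested string kind.
  mov(scratch1, Operand(length, LSL, kSmiTagSize));
  LoadRoot(scratch2, map_index);
  // Initialize the header fields: length, map, and an empty hash.
  str(scratch1, FieldMemOperand(string, String::kLengthOffset));
  mov(scratch1, Operand(String::kEmptyHashField));
  str(scratch2, FieldMemOperand(string, HeapObject::kMapOffset));
  str(scratch1, FieldMemOperand(string, String::kHashFieldOffset));
}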
1856 void MacroAssembler::AllocateTwoByteConsString(Register result, | 1859 void MacroAssembler::AllocateTwoByteConsString(Register result, |
1857 Register length, | 1860 Register length, |
1858 Register scratch1, | 1861 Register scratch1, |
1859 Register scratch2, | 1862 Register scratch2, |
1860 Label* gc_required) { | 1863 Label* gc_required) { |
1861 AllocateInNewSpace(ConsString::kSize, | 1864 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
1862 result, | 1865 TAG_OBJECT); |
1863 scratch1, | |
1864 scratch2, | |
1865 gc_required, | |
1866 TAG_OBJECT); | |
1867 | 1866 |
1868 InitializeNewString(result, | 1867 InitializeNewString(result, |
1869 length, | 1868 length, |
1870 Heap::kConsStringMapRootIndex, | 1869 Heap::kConsStringMapRootIndex, |
1871 scratch1, | 1870 scratch1, |
1872 scratch2); | 1871 scratch2); |
1873 } | 1872 } |
1874 | 1873 |
1875 | 1874 |
1876 void MacroAssembler::AllocateAsciiConsString(Register result, | 1875 void MacroAssembler::AllocateAsciiConsString(Register result, |
1877 Register length, | 1876 Register length, |
1878 Register scratch1, | 1877 Register scratch1, |
1879 Register scratch2, | 1878 Register scratch2, |
1880 Label* gc_required) { | 1879 Label* gc_required) { |
1881 AllocateInNewSpace(ConsString::kSize, | 1880 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
1882 result, | 1881 TAG_OBJECT); |
1883 scratch1, | |
1884 scratch2, | |
1885 gc_required, | |
1886 TAG_OBJECT); | |
1887 | 1882 |
1888 InitializeNewString(result, | 1883 InitializeNewString(result, |
1889 length, | 1884 length, |
1890 Heap::kConsAsciiStringMapRootIndex, | 1885 Heap::kConsAsciiStringMapRootIndex, |
1891 scratch1, | 1886 scratch1, |
1892 scratch2); | 1887 scratch2); |
1893 } | 1888 } |
1894 | 1889 |
1895 | 1890 |
1896 void MacroAssembler::AllocateTwoByteSlicedString(Register result, | 1891 void MacroAssembler::AllocateTwoByteSlicedString(Register result, |
1897 Register length, | 1892 Register length, |
1898 Register scratch1, | 1893 Register scratch1, |
1899 Register scratch2, | 1894 Register scratch2, |
1900 Label* gc_required) { | 1895 Label* gc_required) { |
1901 AllocateInNewSpace(SlicedString::kSize, | 1896 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
1902 result, | 1897 TAG_OBJECT); |
1903 scratch1, | |
1904 scratch2, | |
1905 gc_required, | |
1906 TAG_OBJECT); | |
1907 | 1898 |
1908 InitializeNewString(result, | 1899 InitializeNewString(result, |
1909 length, | 1900 length, |
1910 Heap::kSlicedStringMapRootIndex, | 1901 Heap::kSlicedStringMapRootIndex, |
1911 scratch1, | 1902 scratch1, |
1912 scratch2); | 1903 scratch2); |
1913 } | 1904 } |
1914 | 1905 |
1915 | 1906 |
1916 void MacroAssembler::AllocateAsciiSlicedString(Register result, | 1907 void MacroAssembler::AllocateAsciiSlicedString(Register result, |
1917 Register length, | 1908 Register length, |
1918 Register scratch1, | 1909 Register scratch1, |
1919 Register scratch2, | 1910 Register scratch2, |
1920 Label* gc_required) { | 1911 Label* gc_required) { |
1921 AllocateInNewSpace(SlicedString::kSize, | 1912 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
1922 result, | 1913 TAG_OBJECT); |
1923 scratch1, | |
1924 scratch2, | |
1925 gc_required, | |
1926 TAG_OBJECT); | |
1927 | 1914 |
1928 InitializeNewString(result, | 1915 InitializeNewString(result, |
1929 length, | 1916 length, |
1930 Heap::kSlicedAsciiStringMapRootIndex, | 1917 Heap::kSlicedAsciiStringMapRootIndex, |
1931 scratch1, | 1918 scratch1, |
1932 scratch2); | 1919 scratch2); |
1933 } | 1920 } |
1934 | 1921 |
1935 | 1922 |
1936 void MacroAssembler::CompareObjectType(Register object, | 1923 void MacroAssembler::CompareObjectType(Register object, |
(...skipping 1339 matching lines...)
3276 // Allocates a heap number or jumps to the gc_required label if the | 3263 // Allocates a heap number or jumps to the gc_required label if the |
3277 // young space is full and a scavenge is needed. | 3264 // young space is full and a scavenge is needed. |
3278 void MacroAssembler::AllocateHeapNumber(Register result, | 3265 void MacroAssembler::AllocateHeapNumber(Register result, |
3279 Register scratch1, | 3266 Register scratch1, |
3280 Register scratch2, | 3267 Register scratch2, |
3281 Register heap_number_map, | 3268 Register heap_number_map, |
3282 Label* gc_required, | 3269 Label* gc_required, |
3283 TaggingMode tagging_mode) { | 3270 TaggingMode tagging_mode) { |
3284 // Allocate an object in the heap for the heap number and tag it as a heap | 3271 // Allocate an object in the heap for the heap number and tag it as a heap |
3285 // object. | 3272 // object. |
3286 AllocateInNewSpace(HeapNumber::kSize, | 3273 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, |
3287 result, | 3274 tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS); |
3288 scratch1, | |
3289 scratch2, | |
3290 gc_required, | |
3291 tagging_mode == TAG_RESULT ? TAG_OBJECT : | |
3292 NO_ALLOCATION_FLAGS); | |
3293 | 3275 |
3294 // Store heap number map in the allocated object. | 3276 // Store heap number map in the allocated object. |
3295 AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 3277 AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
3296 if (tagging_mode == TAG_RESULT) { | 3278 if (tagging_mode == TAG_RESULT) { |
3297 str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); | 3279 str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); |
3298 } else { | 3280 } else { |
3299 str(heap_number_map, MemOperand(result, HeapObject::kMapOffset)); | 3281 str(heap_number_map, MemOperand(result, HeapObject::kMapOffset)); |
3300 } | 3282 } |
3301 } | 3283 } |
3302 | 3284 |
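The TAG_RESULT branch above relies on the tagged-pointer convention: with TAG_OBJECT, Allocate returns result with the heap-object tag (the low bit) already set, and FieldMemOperand compensates for it. Assuming the usual ARM definition:

// From assembler-arm.h (assumed): a field access on a tagged pointer
// subtracts the tag, so it reaches the same address an untagged
// MemOperand access would.
inline MemOperand FieldMemOperand(Register object, int offset) {
  return MemOperand(object, offset - kHeapObjectTag);  // kHeapObjectTag == 1
}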
(...skipping 718 matching lines...)
4021 void CodePatcher::EmitCondition(Condition cond) { | 4003 void CodePatcher::EmitCondition(Condition cond) { |
4022 Instr instr = Assembler::instr_at(masm_.pc_); | 4004 Instr instr = Assembler::instr_at(masm_.pc_); |
4023 instr = (instr & ~kCondMask) | cond; | 4005 instr = (instr & ~kCondMask) | cond; |
4024 masm_.emit(instr); | 4006 masm_.emit(instr); |
4025 } | 4007 } |
4026 | 4008 |
4027 | 4009 |
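EmitCondition rewrites only the condition field of the instruction at the patch cursor (bits 31:28 on ARM, selected by kCondMask) and re-emits it, leaving the operand bits intact. A hypothetical usage, with `address` standing in for a caller-supplied code pointer:

// Patch one instruction in place: keep its operands, change its condition.
CodePatcher patcher(address, 1);  // patch region of a single instruction
patcher.EmitCondition(eq);        // e.g. make an unconditional op execute on 'eq'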
4028 } } // namespace v8::internal | 4010 } } // namespace v8::internal |
4029 | 4011 |
4030 #endif // V8_TARGET_ARCH_ARM | 4012 #endif // V8_TARGET_ARCH_ARM |