| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1536 matching lines...) |
| 1547 tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask))); | 1547 tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask))); |
| 1548 b(ne, miss); | 1548 b(ne, miss); |
| 1549 | 1549 |
| 1550 // Get the value at the masked, scaled index and return. | 1550 // Get the value at the masked, scaled index and return. |
| 1551 const int kValueOffset = | 1551 const int kValueOffset = |
| 1552 SeededNumberDictionary::kElementsStartOffset + kPointerSize; | 1552 SeededNumberDictionary::kElementsStartOffset + kPointerSize; |
| 1553 ldr(result, FieldMemOperand(t2, kValueOffset)); | 1553 ldr(result, FieldMemOperand(t2, kValueOffset)); |
| 1554 } | 1554 } |
| 1555 | 1555 |
| 1556 | 1556 |
| 1557 void MacroAssembler::AllocateInNewSpace(int object_size, | 1557 void MacroAssembler::Allocate(int object_size, |
| 1558 Register result, | 1558 Register result, |
| 1559 Register scratch1, | 1559 Register scratch1, |
| 1560 Register scratch2, | 1560 Register scratch2, |
| 1561 Label* gc_required, | 1561 Label* gc_required, |
| 1562 AllocationFlags flags) { | 1562 AllocationFlags flags) { |
| 1563 if (!FLAG_inline_new) { | 1563 if (!FLAG_inline_new) { |
| 1564 if (emit_debug_code()) { | 1564 if (emit_debug_code()) { |
| 1565 // Trash the registers to simulate an allocation failure. | 1565 // Trash the registers to simulate an allocation failure. |
| 1566 mov(result, Operand(0x7091)); | 1566 mov(result, Operand(0x7091)); |
| 1567 mov(scratch1, Operand(0x7191)); | 1567 mov(scratch1, Operand(0x7191)); |
| 1568 mov(scratch2, Operand(0x7291)); | 1568 mov(scratch2, Operand(0x7291)); |
| 1569 } | 1569 } |
| 1570 jmp(gc_required); | 1570 jmp(gc_required); |
| 1571 return; | 1571 return; |
| 1572 } | 1572 } |
| 1573 | 1573 |
| 1574 ASSERT(!result.is(scratch1)); | 1574 ASSERT(!result.is(scratch1)); |
| 1575 ASSERT(!result.is(scratch2)); | 1575 ASSERT(!result.is(scratch2)); |
| 1576 ASSERT(!scratch1.is(scratch2)); | 1576 ASSERT(!scratch1.is(scratch2)); |
| 1577 ASSERT(!scratch1.is(ip)); | 1577 ASSERT(!scratch1.is(ip)); |
| 1578 ASSERT(!scratch2.is(ip)); | 1578 ASSERT(!scratch2.is(ip)); |
| 1579 | 1579 |
| 1580 // Make object size into bytes. | 1580 // Make object size into bytes. |
| 1581 if ((flags & SIZE_IN_WORDS) != 0) { | 1581 if ((flags & SIZE_IN_WORDS) != 0) { |
| 1582 object_size *= kPointerSize; | 1582 object_size *= kPointerSize; |
| 1583 } | 1583 } |
| 1584 ASSERT_EQ(0, object_size & kObjectAlignmentMask); | 1584 ASSERT_EQ(0, object_size & kObjectAlignmentMask); |
| 1585 | 1585 |
| 1586 // Check relative positions of allocation top and limit addresses. | 1586 // Check relative positions of allocation top and limit addresses. |
| 1587 // The values must be adjacent in memory to allow the use of LDM. | 1587 // The values must be adjacent in memory to allow the use of LDM. |
| 1588 // Also, assert that the registers are numbered such that the values | 1588 // Also, assert that the registers are numbered such that the values |
| 1589 // are loaded in the correct order. | 1589 // are loaded in the correct order. |
| 1590 ExternalReference new_space_allocation_top = | 1590 ExternalReference allocation_top = ((flags & PRETENURE) != 0) ? |
| 1591 ExternalReference::old_pointer_space_allocation_top_address(isolate()) : |
| 1591 ExternalReference::new_space_allocation_top_address(isolate()); | 1592 ExternalReference::new_space_allocation_top_address(isolate()); |
| 1592 ExternalReference new_space_allocation_limit = | 1593 ExternalReference allocation_limit = ((flags & PRETENURE) != 0) ? |
| 1594 ExternalReference::old_pointer_space_allocation_limit_address( |
| 1595 isolate()) : |
| 1593 ExternalReference::new_space_allocation_limit_address(isolate()); | 1596 ExternalReference::new_space_allocation_limit_address(isolate()); |
| 1597 |
| 1594 intptr_t top = | 1598 intptr_t top = |
| 1595 reinterpret_cast<intptr_t>(new_space_allocation_top.address()); | 1599 reinterpret_cast<intptr_t>(allocation_top.address()); |
| 1596 intptr_t limit = | 1600 intptr_t limit = |
| 1597 reinterpret_cast<intptr_t>(new_space_allocation_limit.address()); | 1601 reinterpret_cast<intptr_t>(allocation_limit.address()); |
| 1598 ASSERT((limit - top) == kPointerSize); | 1602 ASSERT((limit - top) == kPointerSize); |
| 1599 ASSERT(result.code() < ip.code()); | 1603 ASSERT(result.code() < ip.code()); |
| 1600 | 1604 |
| 1601 // Set up allocation top address and object size registers. | 1605 // Set up allocation top address and object size registers. |
| 1602 Register topaddr = scratch1; | 1606 Register topaddr = scratch1; |
| 1603 Register obj_size_reg = scratch2; | 1607 Register obj_size_reg = scratch2; |
| 1604 mov(topaddr, Operand(new_space_allocation_top)); | 1608 mov(topaddr, Operand(allocation_top)); |
| 1605 Operand obj_size_operand = Operand(object_size); | 1609 Operand obj_size_operand = Operand(object_size); |
| 1606 if (!obj_size_operand.is_single_instruction(this)) { | 1610 if (!obj_size_operand.is_single_instruction(this)) { |
| 1607 // We are about to steal IP, so we need to load this value first | 1611 // We are about to steal IP, so we need to load this value first |
| 1608 mov(obj_size_reg, obj_size_operand); | 1612 mov(obj_size_reg, obj_size_operand); |
| 1609 } | 1613 } |
| 1610 | 1614 |
| 1611 // This code stores a temporary value in ip. This is OK, as the code below | 1615 // This code stores a temporary value in ip. This is OK, as the code below |
| 1612 // does not need ip for implicit literal generation. | 1616 // does not need ip for implicit literal generation. |
| 1613 if ((flags & RESULT_CONTAINS_TOP) == 0) { | 1617 if ((flags & RESULT_CONTAINS_TOP) == 0) { |
| 1614 // Load allocation top into result and allocation limit into ip. | 1618 // Load allocation top into result and allocation limit into ip. |
| (...skipping 43 matching lines...) |
| 1658 } | 1662 } |
| 1659 } | 1663 } |
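Note on the hunk above: AllocateInNewSpace(int object_size, ...) is renamed to Allocate(int object_size, ...), and the allocation top/limit external references are now chosen by the new PRETENURE allocation flag (old pointer space when set, new space otherwise). The following is a minimal standalone sketch of that selection logic; the flag bit values and the plain structs are assumptions for illustration, not V8's actual definitions.

#include <cstdio>

// Hypothetical flag bits for illustration only; the real AllocationFlags enum
// lives in V8's macro-assembler header and may use different values.
enum AllocationFlags {
  NO_ALLOCATION_FLAGS = 0,
  TAG_OBJECT          = 1 << 0,
  RESULT_CONTAINS_TOP = 1 << 1,
  SIZE_IN_WORDS       = 1 << 2,
  PRETENURE           = 1 << 3
};

struct AllocationAddresses {
  const char* top;    // name of the external reference holding the top pointer
  const char* limit;  // name of the external reference holding the limit
};

// Mirrors the ternaries added in Allocate(): PRETENURE selects the old
// pointer space top/limit addresses, otherwise new space is used.
AllocationAddresses SelectAllocationAddresses(int flags) {
  if ((flags & PRETENURE) != 0) {
    return { "old_pointer_space_allocation_top_address",
             "old_pointer_space_allocation_limit_address" };
  }
  return { "new_space_allocation_top_address",
           "new_space_allocation_limit_address" };
}

int main() {
  AllocationAddresses a = SelectAllocationAddresses(TAG_OBJECT | PRETENURE);
  std::printf("top=%s, limit=%s\n", a.top, a.limit);
  return 0;
}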
| 1660 | 1664 |
| 1661 | 1665 |
| 1662 void MacroAssembler::AllocateInNewSpace(Register object_size, | 1666 void MacroAssembler::AllocateInNewSpace(Register object_size, |
| 1663 Register result, | 1667 Register result, |
| 1664 Register scratch1, | 1668 Register scratch1, |
| 1665 Register scratch2, | 1669 Register scratch2, |
| 1666 Label* gc_required, | 1670 Label* gc_required, |
| 1667 AllocationFlags flags) { | 1671 AllocationFlags flags) { |
| 1672 ASSERT((flags & PRETENURE) == 0); |
| 1668 if (!FLAG_inline_new) { | 1673 if (!FLAG_inline_new) { |
| 1669 if (emit_debug_code()) { | 1674 if (emit_debug_code()) { |
| 1670 // Trash the registers to simulate an allocation failure. | 1675 // Trash the registers to simulate an allocation failure. |
| 1671 mov(result, Operand(0x7091)); | 1676 mov(result, Operand(0x7091)); |
| 1672 mov(scratch1, Operand(0x7191)); | 1677 mov(scratch1, Operand(0x7191)); |
| 1673 mov(scratch2, Operand(0x7291)); | 1678 mov(scratch2, Operand(0x7291)); |
| 1674 } | 1679 } |
| 1675 jmp(gc_required); | 1680 jmp(gc_required); |
| 1676 return; | 1681 return; |
| 1677 } | 1682 } |
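The register-sized variant keeps its AllocateInNewSpace name and gains ASSERT((flags & PRETENURE) == 0): only the fixed-size path supports pretenuring in this patch, so callers must not pass the new flag here. A tiny standalone sketch of that contract follows; the flag values are illustrative assumptions, not V8's definitions.

#include <cassert>

// Illustrative values only; V8's real AllocationFlags enum may differ.
enum AllocationFlags { NO_ALLOCATION_FLAGS = 0, TAG_OBJECT = 1, PRETENURE = 8 };

// Mirrors the new guard: the register-sized allocator is new-space only.
void CheckNewSpaceOnly(int flags) {
  assert((flags & PRETENURE) == 0 && "PRETENURE not supported here");
}

int main() {
  CheckNewSpaceOnly(TAG_OBJECT);                 // OK: new-space allocation
  // CheckNewSpaceOnly(TAG_OBJECT | PRETENURE);  // would trip the assert
  return 0;
}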
| (...skipping 163 matching lines...) |
| 1841 scratch1, | 1846 scratch1, |
| 1842 scratch2); | 1847 scratch2); |
| 1843 } | 1848 } |
| 1844 | 1849 |
| 1845 | 1850 |
| 1846 void MacroAssembler::AllocateTwoByteConsString(Register result, | 1851 void MacroAssembler::AllocateTwoByteConsString(Register result, |
| 1847 Register length, | 1852 Register length, |
| 1848 Register scratch1, | 1853 Register scratch1, |
| 1849 Register scratch2, | 1854 Register scratch2, |
| 1850 Label* gc_required) { | 1855 Label* gc_required) { |
| 1851 AllocateInNewSpace(ConsString::kSize, | 1856 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
| 1852 result, | 1857 TAG_OBJECT); |
| 1853 scratch1, | |
| 1854 scratch2, | |
| 1855 gc_required, | |
| 1856 TAG_OBJECT); | |
| 1857 | 1858 |
| 1858 InitializeNewString(result, | 1859 InitializeNewString(result, |
| 1859 length, | 1860 length, |
| 1860 Heap::kConsStringMapRootIndex, | 1861 Heap::kConsStringMapRootIndex, |
| 1861 scratch1, | 1862 scratch1, |
| 1862 scratch2); | 1863 scratch2); |
| 1863 } | 1864 } |
| 1864 | 1865 |
| 1865 | 1866 |
| 1866 void MacroAssembler::AllocateAsciiConsString(Register result, | 1867 void MacroAssembler::AllocateAsciiConsString(Register result, |
| 1867 Register length, | 1868 Register length, |
| 1868 Register scratch1, | 1869 Register scratch1, |
| 1869 Register scratch2, | 1870 Register scratch2, |
| 1870 Label* gc_required) { | 1871 Label* gc_required) { |
| 1871 AllocateInNewSpace(ConsString::kSize, | 1872 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
| 1872 result, | 1873 TAG_OBJECT); |
| 1873 scratch1, | |
| 1874 scratch2, | |
| 1875 gc_required, | |
| 1876 TAG_OBJECT); | |
| 1877 | 1874 |
| 1878 InitializeNewString(result, | 1875 InitializeNewString(result, |
| 1879 length, | 1876 length, |
| 1880 Heap::kConsAsciiStringMapRootIndex, | 1877 Heap::kConsAsciiStringMapRootIndex, |
| 1881 scratch1, | 1878 scratch1, |
| 1882 scratch2); | 1879 scratch2); |
| 1883 } | 1880 } |
| 1884 | 1881 |
| 1885 | 1882 |
| 1886 void MacroAssembler::AllocateTwoByteSlicedString(Register result, | 1883 void MacroAssembler::AllocateTwoByteSlicedString(Register result, |
| 1887 Register length, | 1884 Register length, |
| 1888 Register scratch1, | 1885 Register scratch1, |
| 1889 Register scratch2, | 1886 Register scratch2, |
| 1890 Label* gc_required) { | 1887 Label* gc_required) { |
| 1891 AllocateInNewSpace(SlicedString::kSize, | 1888 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
| 1892 result, | 1889 TAG_OBJECT); |
| 1893 scratch1, | |
| 1894 scratch2, | |
| 1895 gc_required, | |
| 1896 TAG_OBJECT); | |
| 1897 | 1890 |
| 1898 InitializeNewString(result, | 1891 InitializeNewString(result, |
| 1899 length, | 1892 length, |
| 1900 Heap::kSlicedStringMapRootIndex, | 1893 Heap::kSlicedStringMapRootIndex, |
| 1901 scratch1, | 1894 scratch1, |
| 1902 scratch2); | 1895 scratch2); |
| 1903 } | 1896 } |
| 1904 | 1897 |
| 1905 | 1898 |
| 1906 void MacroAssembler::AllocateAsciiSlicedString(Register result, | 1899 void MacroAssembler::AllocateAsciiSlicedString(Register result, |
| 1907 Register length, | 1900 Register length, |
| 1908 Register scratch1, | 1901 Register scratch1, |
| 1909 Register scratch2, | 1902 Register scratch2, |
| 1910 Label* gc_required) { | 1903 Label* gc_required) { |
| 1911 AllocateInNewSpace(SlicedString::kSize, | 1904 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
| 1912 result, | 1905 TAG_OBJECT); |
| 1913 scratch1, | |
| 1914 scratch2, | |
| 1915 gc_required, | |
| 1916 TAG_OBJECT); | |
| 1917 | 1906 |
| 1918 InitializeNewString(result, | 1907 InitializeNewString(result, |
| 1919 length, | 1908 length, |
| 1920 Heap::kSlicedAsciiStringMapRootIndex, | 1909 Heap::kSlicedAsciiStringMapRootIndex, |
| 1921 scratch1, | 1910 scratch1, |
| 1922 scratch2); | 1911 scratch2); |
| 1923 } | 1912 } |
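The four cons/sliced string allocators above switch mechanically from the multi-line AllocateInNewSpace calls to the compact Allocate form, still passing TAG_OBJECT, so their behavior is unchanged. Illustrative only (not part of this patch): with a single Allocate() entry point, such a call site could later opt into old-space allocation by OR-ing PRETENURE into the flags. The enum values and the operator overload below are assumptions for the sketch.

// Hypothetical flag values; V8's real enum may differ.
enum AllocationFlags {
  NO_ALLOCATION_FLAGS = 0,
  TAG_OBJECT = 1 << 0,
  PRETENURE  = 1 << 3
};

inline AllocationFlags operator|(AllocationFlags a, AllocationFlags b) {
  return static_cast<AllocationFlags>(static_cast<int>(a) | static_cast<int>(b));
}

// A caller could then request a tagged, pretenured allocation in one call,
// e.g. (hypothetical):
//   Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
//            TAG_OBJECT | PRETENURE);
int main() {
  AllocationFlags flags = TAG_OBJECT | PRETENURE;
  return (flags & PRETENURE) != 0 ? 0 : 1;  // returns 0: pretenure bit is set
}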
| 1924 | 1913 |
| 1925 | 1914 |
| 1926 void MacroAssembler::CompareObjectType(Register object, | 1915 void MacroAssembler::CompareObjectType(Register object, |
| (...skipping 1318 matching lines...) |
| 3245 // Allocates a heap number or jumps to the need_gc label if the young space | 3234 // Allocates a heap number or jumps to the need_gc label if the young space |
| 3246 // is full and a scavenge is needed. | 3235 // is full and a scavenge is needed. |
| 3247 void MacroAssembler::AllocateHeapNumber(Register result, | 3236 void MacroAssembler::AllocateHeapNumber(Register result, |
| 3248 Register scratch1, | 3237 Register scratch1, |
| 3249 Register scratch2, | 3238 Register scratch2, |
| 3250 Register heap_number_map, | 3239 Register heap_number_map, |
| 3251 Label* gc_required, | 3240 Label* gc_required, |
| 3252 TaggingMode tagging_mode) { | 3241 TaggingMode tagging_mode) { |
| 3253 // Allocate an object in the heap for the heap number and tag it as a heap | 3242 // Allocate an object in the heap for the heap number and tag it as a heap |
| 3254 // object. | 3243 // object. |
| 3255 AllocateInNewSpace(HeapNumber::kSize, | 3244 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, |
| 3256 result, | 3245 tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS); |
| 3257 scratch1, | |
| 3258 scratch2, | |
| 3259 gc_required, | |
| 3260 tagging_mode == TAG_RESULT ? TAG_OBJECT : | |
| 3261 NO_ALLOCATION_FLAGS); | |
| 3262 | 3246 |
| 3263 // Store heap number map in the allocated object. | 3247 // Store heap number map in the allocated object. |
| 3264 AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 3248 AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
| 3265 if (tagging_mode == TAG_RESULT) { | 3249 if (tagging_mode == TAG_RESULT) { |
| 3266 str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); | 3250 str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); |
| 3267 } else { | 3251 } else { |
| 3268 str(heap_number_map, MemOperand(result, HeapObject::kMapOffset)); | 3252 str(heap_number_map, MemOperand(result, HeapObject::kMapOffset)); |
| 3269 } | 3253 } |
| 3270 } | 3254 } |
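AllocateHeapNumber now forwards to Allocate() and folds its TaggingMode into the allocation flags: TAG_RESULT requests a tagged pointer (TAG_OBJECT, map stored through FieldMemOperand), anything else keeps the untagged address (NO_ALLOCATION_FLAGS, map stored through MemOperand). Below is a standalone sketch of that mapping; the enums are illustrative stand-ins, not V8's definitions.

#include <cstdio>

// Illustrative stand-ins for V8's enums; names and values are assumptions.
enum TaggingMode { TAG_RESULT, DONT_TAG_RESULT };
enum AllocationFlags { NO_ALLOCATION_FLAGS = 0, TAG_OBJECT = 1 };

// Mirrors the ternary passed to Allocate() in the hunk above.
AllocationFlags FlagsForTaggingMode(TaggingMode mode) {
  return mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS;
}

int main() {
  std::printf("TAG_RESULT      -> flags %d\n", FlagsForTaggingMode(TAG_RESULT));
  std::printf("DONT_TAG_RESULT -> flags %d\n", FlagsForTaggingMode(DONT_TAG_RESULT));
  return 0;
}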
| 3271 | 3255 |
| (...skipping 718 matching lines...) |
| 3990 void CodePatcher::EmitCondition(Condition cond) { | 3974 void CodePatcher::EmitCondition(Condition cond) { |
| 3991 Instr instr = Assembler::instr_at(masm_.pc_); | 3975 Instr instr = Assembler::instr_at(masm_.pc_); |
| 3992 instr = (instr & ~kCondMask) | cond; | 3976 instr = (instr & ~kCondMask) | cond; |
| 3993 masm_.emit(instr); | 3977 masm_.emit(instr); |
| 3994 } | 3978 } |
| 3995 | 3979 |
| 3996 | 3980 |
| 3997 } } // namespace v8::internal | 3981 } } // namespace v8::internal |
| 3998 | 3982 |
| 3999 #endif // V8_TARGET_ARCH_ARM | 3983 #endif // V8_TARGET_ARCH_ARM |