| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1546 matching lines...) |
| 1557 tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask))); | 1557 tst(t1, Operand(Smi::FromInt(PropertyDetails::TypeField::kMask))); |
| 1558 b(ne, miss); | 1558 b(ne, miss); |
| 1559 | 1559 |
| 1560 // Get the value at the masked, scaled index and return. | 1560 // Get the value at the masked, scaled index and return. |
| 1561 const int kValueOffset = | 1561 const int kValueOffset = |
| 1562 SeededNumberDictionary::kElementsStartOffset + kPointerSize; | 1562 SeededNumberDictionary::kElementsStartOffset + kPointerSize; |
| 1563 ldr(result, FieldMemOperand(t2, kValueOffset)); | 1563 ldr(result, FieldMemOperand(t2, kValueOffset)); |
| 1564 } | 1564 } |
| 1565 | 1565 |
| 1566 | 1566 |
| 1567 void MacroAssembler::AllocateInNewSpace(int object_size, | 1567 void MacroAssembler::Allocate(int object_size, |
| 1568 Register result, | 1568 Register result, |
| 1569 Register scratch1, | 1569 Register scratch1, |
| 1570 Register scratch2, | 1570 Register scratch2, |
| 1571 Label* gc_required, | 1571 Label* gc_required, |
| 1572 AllocationFlags flags) { | 1572 AllocationFlags flags) { |
| 1573 if (!FLAG_inline_new) { | 1573 if (!FLAG_inline_new) { |
| 1574 if (emit_debug_code()) { | 1574 if (emit_debug_code()) { |
| 1575 // Trash the registers to simulate an allocation failure. | 1575 // Trash the registers to simulate an allocation failure. |
| 1576 mov(result, Operand(0x7091)); | 1576 mov(result, Operand(0x7091)); |
| 1577 mov(scratch1, Operand(0x7191)); | 1577 mov(scratch1, Operand(0x7191)); |
| 1578 mov(scratch2, Operand(0x7291)); | 1578 mov(scratch2, Operand(0x7291)); |
| 1579 } | 1579 } |
| 1580 jmp(gc_required); | 1580 jmp(gc_required); |
| 1581 return; | 1581 return; |
| 1582 } | 1582 } |
| 1583 | 1583 |
| 1584 ASSERT(!result.is(scratch1)); | 1584 ASSERT(!result.is(scratch1)); |
| 1585 ASSERT(!result.is(scratch2)); | 1585 ASSERT(!result.is(scratch2)); |
| 1586 ASSERT(!scratch1.is(scratch2)); | 1586 ASSERT(!scratch1.is(scratch2)); |
| 1587 ASSERT(!scratch1.is(ip)); | 1587 ASSERT(!scratch1.is(ip)); |
| 1588 ASSERT(!scratch2.is(ip)); | 1588 ASSERT(!scratch2.is(ip)); |
| 1589 | 1589 |
| 1590 // Make object size into bytes. | 1590 // Make object size into bytes. |
| 1591 if ((flags & SIZE_IN_WORDS) != 0) { | 1591 if ((flags & SIZE_IN_WORDS) != 0) { |
| 1592 object_size *= kPointerSize; | 1592 object_size *= kPointerSize; |
| 1593 } | 1593 } |
| 1594 ASSERT_EQ(0, object_size & kObjectAlignmentMask); | 1594 ASSERT_EQ(0, object_size & kObjectAlignmentMask); |
| 1595 | 1595 |
| 1596 // Check relative positions of allocation top and limit addresses. | 1596 // Check relative positions of allocation top and limit addresses. |
| 1597 // The values must be adjacent in memory to allow the use of LDM. | 1597 // The values must be adjacent in memory to allow the use of LDM. |
| 1598 // Also, assert that the registers are numbered such that the values | 1598 // Also, assert that the registers are numbered such that the values |
| 1599 // are loaded in the correct order. | 1599 // are loaded in the correct order. |
| 1600 ExternalReference new_space_allocation_top = | 1600 ExternalReference allocation_top = ((flags & PRETENURE) != 0) ? |
| | 1601 ExternalReference::old_pointer_space_allocation_top_address(isolate()) : |
| 1601 ExternalReference::new_space_allocation_top_address(isolate()); | 1602 ExternalReference::new_space_allocation_top_address(isolate()); |
| 1602 ExternalReference new_space_allocation_limit = | 1603 ExternalReference allocation_limit = ((flags & PRETENURE) != 0) ? |
| | 1604 ExternalReference::old_pointer_space_allocation_limit_address( |
| | 1605 isolate()) : |
| 1603 ExternalReference::new_space_allocation_limit_address(isolate()); | 1606 ExternalReference::new_space_allocation_limit_address(isolate()); |
| | 1607 |
| 1604 intptr_t top = | 1608 intptr_t top = |
| 1605 reinterpret_cast<intptr_t>(new_space_allocation_top.address()); | 1609 reinterpret_cast<intptr_t>(allocation_top.address()); |
| 1606 intptr_t limit = | 1610 intptr_t limit = |
| 1607 reinterpret_cast<intptr_t>(new_space_allocation_limit.address()); | 1611 reinterpret_cast<intptr_t>(allocation_limit.address()); |
| 1608 ASSERT((limit - top) == kPointerSize); | 1612 ASSERT((limit - top) == kPointerSize); |
| 1609 ASSERT(result.code() < ip.code()); | 1613 ASSERT(result.code() < ip.code()); |
| 1610 | 1614 |
| 1611 // Set up allocation top address and object size registers. | 1615 // Set up allocation top address and object size registers. |
| 1612 Register topaddr = scratch1; | 1616 Register topaddr = scratch1; |
| 1613 Register obj_size_reg = scratch2; | 1617 Register obj_size_reg = scratch2; |
| 1614 mov(topaddr, Operand(new_space_allocation_top)); | 1618 mov(topaddr, Operand(allocation_top)); |
| 1615 Operand obj_size_operand = Operand(object_size); | 1619 Operand obj_size_operand = Operand(object_size); |
| 1616 if (!obj_size_operand.is_single_instruction(this)) { | 1620 if (!obj_size_operand.is_single_instruction(this)) { |
| 1617 // We are about to steal IP, so we need to load this value first | 1621 // We are about to steal IP, so we need to load this value first |
| 1618 mov(obj_size_reg, obj_size_operand); | 1622 mov(obj_size_reg, obj_size_operand); |
| 1619 } | 1623 } |
| 1620 | 1624 |
| 1621 // This code stores a temporary value in ip. This is OK, as the code below | 1625 // This code stores a temporary value in ip. This is OK, as the code below |
| 1622 // does not need ip for implicit literal generation. | 1626 // does not need ip for implicit literal generation. |
| 1623 if ((flags & RESULT_CONTAINS_TOP) == 0) { | 1627 if ((flags & RESULT_CONTAINS_TOP) == 0) { |
| 1624 // Load allocation top into result and allocation limit into ip. | 1628 // Load allocation top into result and allocation limit into ip. |
| (...skipping 43 matching lines...) |
| 1668 } | 1672 } |
| 1669 } | 1673 } |
| 1670 | 1674 |
| 1671 | 1675 |
| 1672 void MacroAssembler::AllocateInNewSpace(Register object_size, | 1676 void MacroAssembler::AllocateInNewSpace(Register object_size, |
| 1673 Register result, | 1677 Register result, |
| 1674 Register scratch1, | 1678 Register scratch1, |
| 1675 Register scratch2, | 1679 Register scratch2, |
| 1676 Label* gc_required, | 1680 Label* gc_required, |
| 1677 AllocationFlags flags) { | 1681 AllocationFlags flags) { |
| | 1682 ASSERT((flags & PRETENURE) == 0); |
| 1678 if (!FLAG_inline_new) { | 1683 if (!FLAG_inline_new) { |
| 1679 if (emit_debug_code()) { | 1684 if (emit_debug_code()) { |
| 1680 // Trash the registers to simulate an allocation failure. | 1685 // Trash the registers to simulate an allocation failure. |
| 1681 mov(result, Operand(0x7091)); | 1686 mov(result, Operand(0x7091)); |
| 1682 mov(scratch1, Operand(0x7191)); | 1687 mov(scratch1, Operand(0x7191)); |
| 1683 mov(scratch2, Operand(0x7291)); | 1688 mov(scratch2, Operand(0x7291)); |
| 1684 } | 1689 } |
| 1685 jmp(gc_required); | 1690 jmp(gc_required); |
| 1686 return; | 1691 return; |
| 1687 } | 1692 } |
| (...skipping 163 matching lines...) |
| 1851 scratch1, | 1856 scratch1, |
| 1852 scratch2); | 1857 scratch2); |
| 1853 } | 1858 } |
| 1854 | 1859 |
| 1855 | 1860 |
| 1856 void MacroAssembler::AllocateTwoByteConsString(Register result, | 1861 void MacroAssembler::AllocateTwoByteConsString(Register result, |
| 1857 Register length, | 1862 Register length, |
| 1858 Register scratch1, | 1863 Register scratch1, |
| 1859 Register scratch2, | 1864 Register scratch2, |
| 1860 Label* gc_required) { | 1865 Label* gc_required) { |
| 1861 AllocateInNewSpace(ConsString::kSize, | 1866 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
| 1862 result, | 1867 TAG_OBJECT); |
| 1863 scratch1, | |
| 1864 scratch2, | |
| 1865 gc_required, | |
| 1866 TAG_OBJECT); | |
| 1867 | 1868 |
| 1868 InitializeNewString(result, | 1869 InitializeNewString(result, |
| 1869 length, | 1870 length, |
| 1870 Heap::kConsStringMapRootIndex, | 1871 Heap::kConsStringMapRootIndex, |
| 1871 scratch1, | 1872 scratch1, |
| 1872 scratch2); | 1873 scratch2); |
| 1873 } | 1874 } |
| 1874 | 1875 |
| 1875 | 1876 |
| 1876 void MacroAssembler::AllocateAsciiConsString(Register result, | 1877 void MacroAssembler::AllocateAsciiConsString(Register result, |
| 1877 Register length, | 1878 Register length, |
| 1878 Register scratch1, | 1879 Register scratch1, |
| 1879 Register scratch2, | 1880 Register scratch2, |
| 1880 Label* gc_required) { | 1881 Label* gc_required) { |
| 1881 AllocateInNewSpace(ConsString::kSize, | 1882 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required, |
| 1882 result, | 1883 TAG_OBJECT); |
| 1883 scratch1, | |
| 1884 scratch2, | |
| 1885 gc_required, | |
| 1886 TAG_OBJECT); | |
| 1887 | 1884 |
| 1888 InitializeNewString(result, | 1885 InitializeNewString(result, |
| 1889 length, | 1886 length, |
| 1890 Heap::kConsAsciiStringMapRootIndex, | 1887 Heap::kConsAsciiStringMapRootIndex, |
| 1891 scratch1, | 1888 scratch1, |
| 1892 scratch2); | 1889 scratch2); |
| 1893 } | 1890 } |
| 1894 | 1891 |
| 1895 | 1892 |
| 1896 void MacroAssembler::AllocateTwoByteSlicedString(Register result, | 1893 void MacroAssembler::AllocateTwoByteSlicedString(Register result, |
| 1897 Register length, | 1894 Register length, |
| 1898 Register scratch1, | 1895 Register scratch1, |
| 1899 Register scratch2, | 1896 Register scratch2, |
| 1900 Label* gc_required) { | 1897 Label* gc_required) { |
| 1901 AllocateInNewSpace(SlicedString::kSize, | 1898 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
| 1902 result, | 1899 TAG_OBJECT); |
| 1903 scratch1, | |
| 1904 scratch2, | |
| 1905 gc_required, | |
| 1906 TAG_OBJECT); | |
| 1907 | 1900 |
| 1908 InitializeNewString(result, | 1901 InitializeNewString(result, |
| 1909 length, | 1902 length, |
| 1910 Heap::kSlicedStringMapRootIndex, | 1903 Heap::kSlicedStringMapRootIndex, |
| 1911 scratch1, | 1904 scratch1, |
| 1912 scratch2); | 1905 scratch2); |
| 1913 } | 1906 } |
| 1914 | 1907 |
| 1915 | 1908 |
| 1916 void MacroAssembler::AllocateAsciiSlicedString(Register result, | 1909 void MacroAssembler::AllocateAsciiSlicedString(Register result, |
| 1917 Register length, | 1910 Register length, |
| 1918 Register scratch1, | 1911 Register scratch1, |
| 1919 Register scratch2, | 1912 Register scratch2, |
| 1920 Label* gc_required) { | 1913 Label* gc_required) { |
| 1921 AllocateInNewSpace(SlicedString::kSize, | 1914 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required, |
| 1922 result, | 1915 TAG_OBJECT); |
| 1923 scratch1, | |
| 1924 scratch2, | |
| 1925 gc_required, | |
| 1926 TAG_OBJECT); | |
| 1927 | 1916 |
| 1928 InitializeNewString(result, | 1917 InitializeNewString(result, |
| 1929 length, | 1918 length, |
| 1930 Heap::kSlicedAsciiStringMapRootIndex, | 1919 Heap::kSlicedAsciiStringMapRootIndex, |
| 1931 scratch1, | 1920 scratch1, |
| 1932 scratch2); | 1921 scratch2); |
| 1933 } | 1922 } |
| 1934 | 1923 |
| 1935 | 1924 |
| 1936 void MacroAssembler::CompareObjectType(Register object, | 1925 void MacroAssembler::CompareObjectType(Register object, |
| (...skipping 1339 matching lines...) |
| 3276 // Allocates a heap number or jumps to the need_gc label if the young space | 3265 // Allocates a heap number or jumps to the need_gc label if the young space |
| 3277 // is full and a scavenge is needed. | 3266 // is full and a scavenge is needed. |
| 3278 void MacroAssembler::AllocateHeapNumber(Register result, | 3267 void MacroAssembler::AllocateHeapNumber(Register result, |
| 3279 Register scratch1, | 3268 Register scratch1, |
| 3280 Register scratch2, | 3269 Register scratch2, |
| 3281 Register heap_number_map, | 3270 Register heap_number_map, |
| 3282 Label* gc_required, | 3271 Label* gc_required, |
| 3283 TaggingMode tagging_mode) { | 3272 TaggingMode tagging_mode) { |
| 3284 // Allocate an object in the heap for the heap number and tag it as a heap | 3273 // Allocate an object in the heap for the heap number and tag it as a heap |
| 3285 // object. | 3274 // object. |
| 3286 AllocateInNewSpace(HeapNumber::kSize, | 3275 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required, |
| 3287 result, | 3276 tagging_mode == TAG_RESULT ? TAG_OBJECT : NO_ALLOCATION_FLAGS); |
| 3288 scratch1, | |
| 3289 scratch2, | |
| 3290 gc_required, | |
| 3291 tagging_mode == TAG_RESULT ? TAG_OBJECT : | |
| 3292 NO_ALLOCATION_FLAGS); | |
| 3293 | 3277 |
| 3294 // Store heap number map in the allocated object. | 3278 // Store heap number map in the allocated object. |
| 3295 AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | 3279 AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); |
| 3296 if (tagging_mode == TAG_RESULT) { | 3280 if (tagging_mode == TAG_RESULT) { |
| 3297 str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); | 3281 str(heap_number_map, FieldMemOperand(result, HeapObject::kMapOffset)); |
| 3298 } else { | 3282 } else { |
| 3299 str(heap_number_map, MemOperand(result, HeapObject::kMapOffset)); | 3283 str(heap_number_map, MemOperand(result, HeapObject::kMapOffset)); |
| 3300 } | 3284 } |
| 3301 } | 3285 } |
| 3302 | 3286 |
| (...skipping 718 matching lines...) |
| 4021 void CodePatcher::EmitCondition(Condition cond) { | 4005 void CodePatcher::EmitCondition(Condition cond) { |
| 4022 Instr instr = Assembler::instr_at(masm_.pc_); | 4006 Instr instr = Assembler::instr_at(masm_.pc_); |
| 4023 instr = (instr & ~kCondMask) | cond; | 4007 instr = (instr & ~kCondMask) | cond; |
| 4024 masm_.emit(instr); | 4008 masm_.emit(instr); |
| 4025 } | 4009 } |
| 4026 | 4010 |
| 4027 | 4011 |
| 4028 } } // namespace v8::internal | 4012 } } // namespace v8::internal |
| 4029 | 4013 |
| 4030 #endif // V8_TARGET_ARCH_ARM | 4014 #endif // V8_TARGET_ARCH_ARM |
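For context on the change above: the renamed Allocate() still emits the inline bump-pointer allocation that AllocateInNewSpace emitted, but the new PRETENURE flag makes it consult the old-pointer-space allocation top/limit external references instead of the new-space ones. A minimal sketch of that selection logic in plain C++ follows; the names are hypothetical and this is not the actual emitted ARM code, only the behavior it implements.

    // Illustrative sketch only: bump-pointer allocation with the PRETENURE
    // flag choosing which space's top/limit pair is consulted.
    #include <cstdint>
    #include <cstddef>

    struct Space {
      uintptr_t top;    // current allocation pointer
      uintptr_t limit;  // end of the currently usable region
    };

    enum AllocationFlags { NO_ALLOCATION_FLAGS = 0, PRETENURE = 1 << 0 };

    // Returns the untagged start address of the new object, or 0 when the
    // space is full (the generated code instead branches to gc_required).
    uintptr_t AllocateSketch(Space& new_space, Space& old_pointer_space,
                             size_t object_size, int flags) {
      Space& space = (flags & PRETENURE) ? old_pointer_space : new_space;
      uintptr_t result = space.top;
      if (object_size > space.limit - result) return 0;  // would exceed limit
      space.top = result + object_size;                  // bump the top
      return result;
    }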