| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
| 6 // are met: | 6 // are met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 52 matching lines...) |
| 63 answer |= 1u << VFP3; | 63 answer |= 1u << VFP3; |
| 64 #endif // defined(__VFP_FP__) && !defined(__SOFTFP__) | 64 #endif // defined(__VFP_FP__) && !defined(__SOFTFP__) |
| 65 #ifdef CAN_USE_VFP_INSTRUCTIONS | 65 #ifdef CAN_USE_VFP_INSTRUCTIONS |
| 66 answer |= 1u << VFP3; | 66 answer |= 1u << VFP3; |
| 67 #endif // def CAN_USE_VFP_INSTRUCTIONS | 67 #endif // def CAN_USE_VFP_INSTRUCTIONS |
| 68 return answer; | 68 return answer; |
| 69 } | 69 } |
| 70 #endif // def __arm__ | 70 #endif // def __arm__ |
| 71 | 71 |
| 72 | 72 |
| 73 void CpuFeatures::Probe() { | 73 void CpuFeatures::Probe(bool portable) { |
| 74 #ifndef __arm__ | 74 #ifndef __arm__ |
| 75 // For the simulator=arm build, use VFP when FLAG_enable_vfp3 is enabled. | 75 // For the simulator=arm build, use VFP when FLAG_enable_vfp3 is enabled. |
| 76 if (FLAG_enable_vfp3) { | 76 if (FLAG_enable_vfp3) { |
| 77 supported_ |= 1u << VFP3; | 77 supported_ |= 1u << VFP3; |
| 78 } | 78 } |
| 79 // For the simulator=arm build, use ARMv7 when FLAG_enable_armv7 is enabled | 79 // For the simulator=arm build, use ARMv7 when FLAG_enable_armv7 is enabled |
| 80 if (FLAG_enable_armv7) { | 80 if (FLAG_enable_armv7) { |
| 81 supported_ |= 1u << ARMv7; | 81 supported_ |= 1u << ARMv7; |
| 82 } | 82 } |
| 83 #else // def __arm__ | 83 #else // def __arm__ |
| 84 if (Serializer::enabled()) { | 84 if (portable && Serializer::enabled()) { |
| 85 supported_ |= OS::CpuFeaturesImpliedByPlatform(); | 85 supported_ |= OS::CpuFeaturesImpliedByPlatform(); |
| 86 supported_ |= CpuFeaturesImpliedByCompiler(); | 86 supported_ |= CpuFeaturesImpliedByCompiler(); |
| 87 return; // No features if we might serialize. | 87 return; // No features if we might serialize. |
| 88 } | 88 } |
| 89 | 89 |
| 90 if (OS::ArmCpuHasFeature(VFP3)) { | 90 if (OS::ArmCpuHasFeature(VFP3)) { |
| 91 // This implementation also sets the VFP flags if | 91 // This implementation also sets the VFP flags if |
| 92 // runtime detection of VFP returns true. | 92 // runtime detection of VFP returns true. |
| 93 supported_ |= 1u << VFP3; | 93 supported_ |= 1u << VFP3; |
| 94 found_by_runtime_probing_ |= 1u << VFP3; | 94 found_by_runtime_probing_ |= 1u << VFP3; |
| 95 } | 95 } |
| 96 | 96 |
| 97 if (OS::ArmCpuHasFeature(ARMv7)) { | 97 if (OS::ArmCpuHasFeature(ARMv7)) { |
| 98 supported_ |= 1u << ARMv7; | 98 supported_ |= 1u << ARMv7; |
| 99 found_by_runtime_probing_ |= 1u << ARMv7; | 99 found_by_runtime_probing_ |= 1u << ARMv7; |
| 100 } | 100 } |
| 101 |
| 102 if (!portable) found_by_runtime_probing_ = 0; |
| 101 #endif | 103 #endif |
| 102 } | 104 } |
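Note on the Probe() change above: compiler- and platform-implied features are used as the sole source only when portable is true and the serializer is enabled, and at the end the runtime-probed mask is cleared whenever portable is false, so probed features become part of the unconditional baseline. A minimal standalone sketch of that mask arithmetic, with a made-up cpu_has_vfp3 value standing in for OS::ArmCpuHasFeature:

    // Toy illustration (not V8 code) of how the portable flag affects the
    // two feature masks computed in CpuFeatures::Probe().
    #include <cstdint>
    #include <cstdio>

    enum CpuFeature { VFP3 = 0, ARMv7 = 1 };

    int main() {
      uint32_t supported = 0;
      uint32_t found_by_runtime_probing = 0;
      bool portable = false;       // e.g. a build that never serializes
      bool cpu_has_vfp3 = true;    // stand-in for OS::ArmCpuHasFeature(VFP3)

      if (cpu_has_vfp3) {
        supported |= 1u << VFP3;
        found_by_runtime_probing |= 1u << VFP3;
      }
      // Non-portable code treats probed features as unconditionally present,
      // so nothing remains in the runtime-only mask.
      if (!portable) found_by_runtime_probing = 0;

      std::printf("supported=0x%x runtime_only=0x%x\n",
                  supported, found_by_runtime_probing);
      return 0;
    }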
| 103 | 105 |
| 104 | 106 |
| 105 // ----------------------------------------------------------------------------- | 107 // ----------------------------------------------------------------------------- |
| 106 // Implementation of RelocInfo | 108 // Implementation of RelocInfo |
| 107 | 109 |
| 108 const int RelocInfo::kApplyMask = 0; | 110 const int RelocInfo::kApplyMask = 0; |
| 109 | 111 |
| 110 | 112 |
| (...skipping 198 matching lines...) |
| 309 al | B26 | L | NegOffset | fp.code() * B16; | 311 al | B26 | L | NegOffset | fp.code() * B16; |
| 310 static const Instr kStrRegFpNegOffsetPattern = | 312 static const Instr kStrRegFpNegOffsetPattern = |
| 311 al | B26 | NegOffset | fp.code() * B16; | 313 al | B26 | NegOffset | fp.code() * B16; |
| 312 static const Instr kLdrStrInstrTypeMask = 0xffff0000; | 314 static const Instr kLdrStrInstrTypeMask = 0xffff0000; |
| 313 static const Instr kLdrStrInstrArgumentMask = 0x0000ffff; | 315 static const Instr kLdrStrInstrArgumentMask = 0x0000ffff; |
| 314 static const Instr kLdrStrOffsetMask = 0x00000fff; | 316 static const Instr kLdrStrOffsetMask = 0x00000fff; |
| 315 | 317 |
| 316 static const int kMinimalBufferSize = 4*KB; | 318 static const int kMinimalBufferSize = 4*KB; |
| 317 | 319 |
| 318 Assembler::Assembler(void* buffer, int buffer_size) | 320 Assembler::Assembler(void* buffer, int buffer_size) |
| 319 : positions_recorder_(this) { | 321 : positions_recorder_(this), |
| 322 allow_peephole_optimization_(false) { |
| 320 Isolate* isolate = Isolate::Current(); | 323 Isolate* isolate = Isolate::Current(); |
| 324 // BUG(3245989): disable peephole optimization if crankshaft is enabled. |
| 325 allow_peephole_optimization_ = FLAG_peephole_optimization; |
| 321 if (buffer == NULL) { | 326 if (buffer == NULL) { |
| 322 // Do our own buffer management. | 327 // Do our own buffer management. |
| 323 if (buffer_size <= kMinimalBufferSize) { | 328 if (buffer_size <= kMinimalBufferSize) { |
| 324 buffer_size = kMinimalBufferSize; | 329 buffer_size = kMinimalBufferSize; |
| 325 | 330 |
| 326 if (isolate->assembler_spare_buffer() != NULL) { | 331 if (isolate->assembler_spare_buffer() != NULL) { |
| 327 buffer = isolate->assembler_spare_buffer(); | 332 buffer = isolate->assembler_spare_buffer(); |
| 328 isolate->set_assembler_spare_buffer(NULL); | 333 isolate->set_assembler_spare_buffer(NULL); |
| 329 } | 334 } |
| 330 } | 335 } |
| (...skipping 651 matching lines...) |
| 982 emit(cond | B27 | B25 | (imm24 & Imm24Mask)); | 987 emit(cond | B27 | B25 | (imm24 & Imm24Mask)); |
| 983 | 988 |
| 984 if (cond == al) { | 989 if (cond == al) { |
| 985 // Dead code is a good location to emit the constant pool. | 990 // Dead code is a good location to emit the constant pool. |
| 986 CheckConstPool(false, false); | 991 CheckConstPool(false, false); |
| 987 } | 992 } |
| 988 } | 993 } |
| 989 | 994 |
| 990 | 995 |
| 991 void Assembler::bl(int branch_offset, Condition cond) { | 996 void Assembler::bl(int branch_offset, Condition cond) { |
| 997 positions_recorder()->WriteRecordedPositions(); |
| 992 ASSERT((branch_offset & 3) == 0); | 998 ASSERT((branch_offset & 3) == 0); |
| 993 int imm24 = branch_offset >> 2; | 999 int imm24 = branch_offset >> 2; |
| 994 ASSERT(is_int24(imm24)); | 1000 ASSERT(is_int24(imm24)); |
| 995 emit(cond | B27 | B25 | B24 | (imm24 & Imm24Mask)); | 1001 emit(cond | B27 | B25 | B24 | (imm24 & Imm24Mask)); |
| 996 } | 1002 } |
| 997 | 1003 |
| 998 | 1004 |
| 999 void Assembler::blx(int branch_offset) { // v5 and above | 1005 void Assembler::blx(int branch_offset) { // v5 and above |
| 1000 positions_recorder()->WriteRecordedPositions(); | 1006 positions_recorder()->WriteRecordedPositions(); |
| 1001 ASSERT((branch_offset & 1) == 0); | 1007 ASSERT((branch_offset & 1) == 0); |
| (...skipping 643 matching lines...) |
| 1645 // It expects to find the address just after the svc instruction. | 1651 // It expects to find the address just after the svc instruction. |
| 1646 BlockConstPoolFor(2); | 1652 BlockConstPoolFor(2); |
| 1647 if (code >= 0) { | 1653 if (code >= 0) { |
| 1648 svc(kStopInterruptCode + code, cond); | 1654 svc(kStopInterruptCode + code, cond); |
| 1649 } else { | 1655 } else { |
| 1650 svc(kStopInterruptCode + kMaxStopCode, cond); | 1656 svc(kStopInterruptCode + kMaxStopCode, cond); |
| 1651 } | 1657 } |
| 1652 emit(reinterpret_cast<Instr>(msg)); | 1658 emit(reinterpret_cast<Instr>(msg)); |
| 1653 #else // def __arm__ | 1659 #else // def __arm__ |
| 1654 #ifdef CAN_USE_ARMV5_INSTRUCTIONS | 1660 #ifdef CAN_USE_ARMV5_INSTRUCTIONS |
| 1661 ASSERT(cond == al); |
| 1655 bkpt(0); | 1662 bkpt(0); |
| 1656 #else // ndef CAN_USE_ARMV5_INSTRUCTIONS | 1663 #else // ndef CAN_USE_ARMV5_INSTRUCTIONS |
| 1657 svc(0x9f0001); | 1664 svc(0x9f0001, cond); |
| 1658 #endif // ndef CAN_USE_ARMV5_INSTRUCTIONS | 1665 #endif // ndef CAN_USE_ARMV5_INSTRUCTIONS |
| 1659 #endif // def __arm__ | 1666 #endif // def __arm__ |
| 1660 } | 1667 } |
| 1661 | 1668 |
| 1662 | 1669 |
| 1663 void Assembler::bkpt(uint32_t imm16) { // v5 and above | 1670 void Assembler::bkpt(uint32_t imm16) { // v5 and above |
| 1664 ASSERT(is_uint16(imm16)); | 1671 ASSERT(is_uint16(imm16)); |
| 1665 emit(al | B24 | B21 | (imm16 >> 4)*B8 | 7*B4 | (imm16 & 0xf)); | 1672 emit(al | B24 | B21 | (imm16 >> 4)*B8 | 7*B4 | (imm16 & 0xf)); |
| 1666 } | 1673 } |
| 1667 | 1674 |
| (...skipping 153 matching lines...) |
| 1821 | 1828 |
| 1822 | 1829 |
| 1823 // Support for VFP. | 1830 // Support for VFP. |
| 1824 | 1831 |
| 1825 void Assembler::vldr(const DwVfpRegister dst, | 1832 void Assembler::vldr(const DwVfpRegister dst, |
| 1826 const Register base, | 1833 const Register base, |
| 1827 int offset, | 1834 int offset, |
| 1828 const Condition cond) { | 1835 const Condition cond) { |
| 1829 // Ddst = MEM(Rbase + offset). | 1836 // Ddst = MEM(Rbase + offset). |
| 1830 // Instruction details available in ARM DDI 0406A, A8-628. | 1837 // Instruction details available in ARM DDI 0406A, A8-628. |
| 1831 // cond(31-28) | 1101(27-24)| 1001(23-20) | Rbase(19-16) | | 1838 // cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) | |
| 1832 // Vdst(15-12) | 1011(11-8) | offset | 1839 // Vdst(15-12) | 1011(11-8) | offset |
| 1833 ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); | 1840 ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); |
| 1841 int u = 1; |
| 1842 if (offset < 0) { |
| 1843 offset = -offset; |
| 1844 u = 0; |
| 1845 } |
| 1834 ASSERT(offset % 4 == 0); | 1846 ASSERT(offset % 4 == 0); |
| 1835 ASSERT((offset / 4) < 256); | 1847 ASSERT((offset / 4) < 256); |
| 1836 ASSERT(offset >= 0); | 1848 ASSERT(offset >= 0); |
| 1837 emit(cond | 0xD9*B20 | base.code()*B16 | dst.code()*B12 | | 1849 emit(cond | u*B23 | 0xD1*B20 | base.code()*B16 | dst.code()*B12 | |
| 1838 0xB*B8 | ((offset / 4) & 255)); | 1850 0xB*B8 | ((offset / 4) & 255)); |
| 1839 } | 1851 } |
| 1840 | 1852 |
| 1841 | 1853 |
| 1842 void Assembler::vldr(const SwVfpRegister dst, | 1854 void Assembler::vldr(const SwVfpRegister dst, |
| 1843 const Register base, | 1855 const Register base, |
| 1844 int offset, | 1856 int offset, |
| 1845 const Condition cond) { | 1857 const Condition cond) { |
| 1846 // Sdst = MEM(Rbase + offset). | 1858 // Sdst = MEM(Rbase + offset). |
| 1847 // Instruction details available in ARM DDI 0406A, A8-628. | 1859 // Instruction details available in ARM DDI 0406A, A8-628. |
| 1848 // cond(31-28) | 1101(27-24)| 1001(23-20) | Rbase(19-16) | | 1860 // cond(31-28) | 1101(27-24)| U001(23-20) | Rbase(19-16) | |
| 1849 // Vdst(15-12) | 1010(11-8) | offset | 1861 // Vdst(15-12) | 1010(11-8) | offset |
| 1850 ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); | 1862 ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); |
| 1863 int u = 1; |
| 1864 if (offset < 0) { |
| 1865 offset = -offset; |
| 1866 u = 0; |
| 1867 } |
| 1851 ASSERT(offset % 4 == 0); | 1868 ASSERT(offset % 4 == 0); |
| 1852 ASSERT((offset / 4) < 256); | 1869 ASSERT((offset / 4) < 256); |
| 1853 ASSERT(offset >= 0); | 1870 ASSERT(offset >= 0); |
| 1854 int sd, d; | 1871 int sd, d; |
| 1855 dst.split_code(&sd, &d); | 1872 dst.split_code(&sd, &d); |
| 1856 emit(cond | d*B22 | 0xD9*B20 | base.code()*B16 | sd*B12 | | 1873 emit(cond | u*B23 | d*B22 | 0xD1*B20 | base.code()*B16 | sd*B12 | |
| 1857 0xA*B8 | ((offset / 4) & 255)); | 1874 0xA*B8 | ((offset / 4) & 255)); |
| 1858 } | 1875 } |
| 1859 | 1876 |
| 1860 | 1877 |
| 1861 void Assembler::vstr(const DwVfpRegister src, | 1878 void Assembler::vstr(const DwVfpRegister src, |
| 1862 const Register base, | 1879 const Register base, |
| 1863 int offset, | 1880 int offset, |
| 1864 const Condition cond) { | 1881 const Condition cond) { |
| 1865 // MEM(Rbase + offset) = Dsrc. | 1882 // MEM(Rbase + offset) = Dsrc. |
| 1866 // Instruction details available in ARM DDI 0406A, A8-786. | 1883 // Instruction details available in ARM DDI 0406A, A8-786. |
| 1867 // cond(31-28) | 1101(27-24)| 1000(23-20) | | Rbase(19-16) | | 1884 // cond(31-28) | 1101(27-24)| U000(23-20) | | Rbase(19-16) | |
| 1868 // Vsrc(15-12) | 1011(11-8) | (offset/4) | 1885 // Vsrc(15-12) | 1011(11-8) | (offset/4) |
| 1869 ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); | 1886 ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); |
| 1887 int u = 1; |
| 1888 if (offset < 0) { |
| 1889 offset = -offset; |
| 1890 u = 0; |
| 1891 } |
| 1870 ASSERT(offset % 4 == 0); | 1892 ASSERT(offset % 4 == 0); |
| 1871 ASSERT((offset / 4) < 256); | 1893 ASSERT((offset / 4) < 256); |
| 1872 ASSERT(offset >= 0); | 1894 ASSERT(offset >= 0); |
| 1873 emit(cond | 0xD8*B20 | base.code()*B16 | src.code()*B12 | | 1895 emit(cond | u*B23 | 0xD0*B20 | base.code()*B16 | src.code()*B12 | |
| 1874 0xB*B8 | ((offset / 4) & 255)); | 1896 0xB*B8 | ((offset / 4) & 255)); |
| 1875 } | 1897 } |
| 1876 | 1898 |
| 1877 | 1899 |
| 1878 void Assembler::vstr(const SwVfpRegister src, | 1900 void Assembler::vstr(const SwVfpRegister src, |
| 1879 const Register base, | 1901 const Register base, |
| 1880 int offset, | 1902 int offset, |
| 1881 const Condition cond) { | 1903 const Condition cond) { |
| 1882 // MEM(Rbase + offset) = SSrc. | 1904 // MEM(Rbase + offset) = SSrc. |
| 1883 // Instruction details available in ARM DDI 0406A, A8-786. | 1905 // Instruction details available in ARM DDI 0406A, A8-786. |
| 1884 // cond(31-28) | 1101(27-24)| 1000(23-20) | Rbase(19-16) | | 1906 // cond(31-28) | 1101(27-24)| U000(23-20) | Rbase(19-16) | |
| 1885 // Vdst(15-12) | 1010(11-8) | (offset/4) | 1907 // Vdst(15-12) | 1010(11-8) | (offset/4) |
| 1886 ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); | 1908 ASSERT(Isolate::Current()->cpu_features()->IsEnabled(VFP3)); |
| 1909 int u = 1; |
| 1910 if (offset < 0) { |
| 1911 offset = -offset; |
| 1912 u = 0; |
| 1913 } |
| 1887 ASSERT(offset % 4 == 0); | 1914 ASSERT(offset % 4 == 0); |
| 1888 ASSERT((offset / 4) < 256); | 1915 ASSERT((offset / 4) < 256); |
| 1889 ASSERT(offset >= 0); | 1916 ASSERT(offset >= 0); |
| 1890 int sd, d; | 1917 int sd, d; |
| 1891 src.split_code(&sd, &d); | 1918 src.split_code(&sd, &d); |
| 1892 emit(cond | d*B22 | 0xD8*B20 | base.code()*B16 | sd*B12 | | 1919 emit(cond | u*B23 | d*B22 | 0xD0*B20 | base.code()*B16 | sd*B12 | |
| 1893 0xA*B8 | ((offset / 4) & 255)); | 1920 0xA*B8 | ((offset / 4) & 255)); |
| 1894 } | 1921 } |
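All four vldr/vstr overloads above now accept negative offsets: the offset is negated and the add bit (U, bit 23) is cleared, matching the U field in the encoding comments. A standalone sketch of that arithmetic for a hypothetical vldr d0, [fp, #-8] (fp taken to be r11, condition al), using only the bit layout given in the comments:

    // Toy encoder (not the Assembler) for the double-precision vldr layout:
    // cond | U*B23 | 0xD1*B20 | Rbase*B16 | Vd*B12 | 0xB*B8 | imm8.
    #include <cstdint>
    #include <cstdio>

    int main() {
      const uint32_t al = 0xEu << 28;   // "always" condition field
      int base = 11;                    // fp is r11 on ARM
      int dst = 0;                      // d0
      int offset = -8;

      uint32_t u = 1u << 23;            // add the offset by default
      if (offset < 0) {                 // negative offset: subtract instead
        offset = -offset;
        u = 0;
      }

      uint32_t instr = al | u | (0xD1u << 20) | (base << 16) |
                       (dst << 12) | (0xBu << 8) | ((offset / 4) & 255);
      std::printf("vldr d0, [fp, #-8] -> 0x%08X\n", instr);  // 0xED1B0B02
      return 0;
    }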
| 1895 | 1922 |
| 1896 | 1923 |
| 1897 static void DoubleAsTwoUInt32(double d, uint32_t* lo, uint32_t* hi) { | 1924 static void DoubleAsTwoUInt32(double d, uint32_t* lo, uint32_t* hi) { |
| 1898 uint64_t i; | 1925 uint64_t i; |
| 1899 memcpy(&i, &d, 8); | 1926 memcpy(&i, &d, 8); |
| 1900 | 1927 |
| 1901 *lo = i & 0xffffffff; | 1928 *lo = i & 0xffffffff; |
| 1902 *hi = i >> 32; | 1929 *hi = i >> 32; |
| (...skipping 503 matching lines...) |
| 2406 | 2433 |
| 2407 | 2434 |
| 2408 void Assembler::RecordDebugBreakSlot() { | 2435 void Assembler::RecordDebugBreakSlot() { |
| 2409 positions_recorder()->WriteRecordedPositions(); | 2436 positions_recorder()->WriteRecordedPositions(); |
| 2410 CheckBuffer(); | 2437 CheckBuffer(); |
| 2411 RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT); | 2438 RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT); |
| 2412 } | 2439 } |
| 2413 | 2440 |
| 2414 | 2441 |
| 2415 void Assembler::RecordComment(const char* msg) { | 2442 void Assembler::RecordComment(const char* msg) { |
| 2416 if (FLAG_debug_code) { | 2443 if (FLAG_code_comments) { |
| 2417 CheckBuffer(); | 2444 CheckBuffer(); |
| 2418 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg)); | 2445 RecordRelocInfo(RelocInfo::COMMENT, reinterpret_cast<intptr_t>(msg)); |
| 2419 } | 2446 } |
| 2420 } | 2447 } |
| 2421 | 2448 |
| 2422 | 2449 |
| 2423 void Assembler::GrowBuffer() { | 2450 void Assembler::GrowBuffer() { |
| 2424 if (!own_buffer_) FATAL("external code buffer is too small"); | 2451 if (!own_buffer_) FATAL("external code buffer is too small"); |
| 2425 | 2452 |
| 2426 // Compute new buffer size. | 2453 // Compute new buffer size. |
| (...skipping 37 matching lines...) |
| 2464 RelocInfo& rinfo = prinfo_[i]; | 2491 RelocInfo& rinfo = prinfo_[i]; |
| 2465 ASSERT(rinfo.rmode() != RelocInfo::COMMENT && | 2492 ASSERT(rinfo.rmode() != RelocInfo::COMMENT && |
| 2466 rinfo.rmode() != RelocInfo::POSITION); | 2493 rinfo.rmode() != RelocInfo::POSITION); |
| 2467 if (rinfo.rmode() != RelocInfo::JS_RETURN) { | 2494 if (rinfo.rmode() != RelocInfo::JS_RETURN) { |
| 2468 rinfo.set_pc(rinfo.pc() + pc_delta); | 2495 rinfo.set_pc(rinfo.pc() + pc_delta); |
| 2469 } | 2496 } |
| 2470 } | 2497 } |
| 2471 } | 2498 } |
| 2472 | 2499 |
| 2473 | 2500 |
| 2501 void Assembler::db(uint8_t data) { |
| 2502 CheckBuffer(); |
| 2503 *reinterpret_cast<uint8_t*>(pc_) = data; |
| 2504 pc_ += sizeof(uint8_t); |
| 2505 } |
| 2506 |
| 2507 |
| 2508 void Assembler::dd(uint32_t data) { |
| 2509 CheckBuffer(); |
| 2510 *reinterpret_cast<uint32_t*>(pc_) = data; |
| 2511 pc_ += sizeof(uint32_t); |
| 2512 } |
| 2513 |
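The new db() and dd() helpers above write raw data straight into the code stream: after a buffer check, the byte or word is stored at pc_ and pc_ is advanced, with no relocation info recorded. A hypothetical usage sketch (the helper name and emitted values are invented for illustration):

    // Append one raw byte and one raw 32-bit word after generated code,
    // e.g. for a small inline data table read back by the generated code.
    void EmitInlineData(Assembler* assm) {
      assm->db(0x2A);         // single byte
      assm->dd(0xDEADBEEFu);  // single 32-bit word
    }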
| 2514 |
| 2474 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 2515 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
| 2475 RelocInfo rinfo(pc_, rmode, data); // we do not try to reuse pool constants | 2516 RelocInfo rinfo(pc_, rmode, data); // we do not try to reuse pool constants |
| 2476 if (rmode >= RelocInfo::JS_RETURN && rmode <= RelocInfo::DEBUG_BREAK_SLOT) { | 2517 if (rmode >= RelocInfo::JS_RETURN && rmode <= RelocInfo::DEBUG_BREAK_SLOT) { |
| 2477 // Adjust code for new modes. | 2518 // Adjust code for new modes. |
| 2478 ASSERT(RelocInfo::IsDebugBreakSlot(rmode) | 2519 ASSERT(RelocInfo::IsDebugBreakSlot(rmode) |
| 2479 || RelocInfo::IsJSReturn(rmode) | 2520 || RelocInfo::IsJSReturn(rmode) |
| 2480 || RelocInfo::IsComment(rmode) | 2521 || RelocInfo::IsComment(rmode) |
| 2481 || RelocInfo::IsPosition(rmode)); | 2522 || RelocInfo::IsPosition(rmode)); |
| 2482 // These modes do not need an entry in the constant pool. | 2523 // These modes do not need an entry in the constant pool. |
| 2483 } else { | 2524 } else { |
| (...skipping 125 matching lines...) |
| 2609 | 2650 |
| 2610 // Since a constant pool was just emitted, move the check offset forward by | 2651 // Since a constant pool was just emitted, move the check offset forward by |
| 2611 // the standard interval. | 2652 // the standard interval. |
| 2612 next_buffer_check_ = pc_offset() + kCheckConstInterval; | 2653 next_buffer_check_ = pc_offset() + kCheckConstInterval; |
| 2613 } | 2654 } |
| 2614 | 2655 |
| 2615 | 2656 |
| 2616 } } // namespace v8::internal | 2657 } } // namespace v8::internal |
| 2617 | 2658 |
| 2618 #endif // V8_TARGET_ARCH_ARM | 2659 #endif // V8_TARGET_ARCH_ARM |