OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2006 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2017 // address is loaded. | 2017 // address is loaded. |
2018 positions_recorder()->WriteRecordedPositions(); | 2018 positions_recorder()->WriteRecordedPositions(); |
2019 | 2019 |
2020 // Addresses always have 64 bits, so we shouldn't encounter NONE32. | 2020 // Addresses always have 64 bits, so we shouldn't encounter NONE32. |
2021 ASSERT(rmode != RelocInfo::NONE32); | 2021 ASSERT(rmode != RelocInfo::NONE32); |
2022 | 2022 |
2023 UseScratchRegisterScope temps(this); | 2023 UseScratchRegisterScope temps(this); |
2024 Register temp = temps.AcquireX(); | 2024 Register temp = temps.AcquireX(); |
2025 | 2025 |
2026 if (rmode == RelocInfo::NONE64) { | 2026 if (rmode == RelocInfo::NONE64) { |
 | 2027 // Addresses are 48 bits, so we never need to load the upper 16 bits. |
2027 uint64_t imm = reinterpret_cast<uint64_t>(target); | 2028 uint64_t imm = reinterpret_cast<uint64_t>(target); |
 | 2029 // If we don't use ARM tagged addresses, the upper 16 bits must be 0. |
| 2030 ASSERT(((imm >> 48) & 0xffff) == 0); |
2028 movz(temp, (imm >> 0) & 0xffff, 0); | 2031 movz(temp, (imm >> 0) & 0xffff, 0); |
2029 movk(temp, (imm >> 16) & 0xffff, 16); | 2032 movk(temp, (imm >> 16) & 0xffff, 16); |
2030 movk(temp, (imm >> 32) & 0xffff, 32); | 2033 movk(temp, (imm >> 32) & 0xffff, 32); |
2031 movk(temp, (imm >> 48) & 0xffff, 48); | |
2032 } else { | 2034 } else { |
2033 LoadRelocated(temp, Operand(reinterpret_cast<intptr_t>(target), rmode)); | 2035 LoadRelocated(temp, Operand(reinterpret_cast<intptr_t>(target), rmode)); |
2034 } | 2036 } |
2035 Blr(temp); | 2037 Blr(temp); |
2036 #ifdef DEBUG | 2038 #ifdef DEBUG |
2037 AssertSizeOfCodeGeneratedSince(&start_call, CallSize(target, rmode)); | 2039 AssertSizeOfCodeGeneratedSince(&start_call, CallSize(target, rmode)); |
2038 #endif | 2040 #endif |
2039 } | 2041 } |
2040 | 2042 |
2041 | 2043 |
(...skipping 3120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
5162 } | 5164 } |
5163 } | 5165 } |
5164 | 5166 |
5165 | 5167 |
5166 #undef __ | 5168 #undef __ |
5167 | 5169 |
5168 | 5170 |
5169 } } // namespace v8::internal | 5171 } } // namespace v8::internal |
5170 | 5172 |
5171 #endif // V8_TARGET_ARCH_ARM64 | 5173 #endif // V8_TARGET_ARCH_ARM64 |
OLD | NEW |