OLD | NEW |
---|---|
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 2006 matching lines...) | |
2017 // address is loaded. | 2017 // address is loaded. |
2018 positions_recorder()->WriteRecordedPositions(); | 2018 positions_recorder()->WriteRecordedPositions(); |
2019 | 2019 |
2020 // Addresses always have 64 bits, so we shouldn't encounter NONE32. | 2020 // Addresses always have 64 bits, so we shouldn't encounter NONE32. |
2021 ASSERT(rmode != RelocInfo::NONE32); | 2021 ASSERT(rmode != RelocInfo::NONE32); |
2022 | 2022 |
2023 UseScratchRegisterScope temps(this); | 2023 UseScratchRegisterScope temps(this); |
2024 Register temp = temps.AcquireX(); | 2024 Register temp = temps.AcquireX(); |
2025 | 2025 |
2026 if (rmode == RelocInfo::NONE64) { | 2026 if (rmode == RelocInfo::NONE64) { |
2027 // Addresses are 48 bits so we never need to load the upper 16 bits. | |
    ulan 2014/03/24 12:45:54: ASSERT_EQ(0, ((imm >> 48) & 0xffff));
2027 uint64_t imm = reinterpret_cast<uint64_t>(target); | 2028 uint64_t imm = reinterpret_cast<uint64_t>(target); |
2028 movz(temp, (imm >> 0) & 0xffff, 0); | 2029 movz(temp, (imm >> 0) & 0xffff, 0); |
2029 movk(temp, (imm >> 16) & 0xffff, 16); | 2030 if (predictable_code_size()) { |
2030 movk(temp, (imm >> 32) & 0xffff, 32); | 2031 movk(temp, (imm >> 16) & 0xffff, 16); |
2031 movk(temp, (imm >> 48) & 0xffff, 48); | 2032 movk(temp, (imm >> 32) & 0xffff, 32); |
 | 2033 } else { |
 | 2034 if (((imm >> 16) & 0xffff) != 0) movk(temp, (imm >> 16) & 0xffff, 16); |
 | 2035 if (((imm >> 32) & 0xffff) != 0) movk(temp, (imm >> 32) & 0xffff, 32); |
    ulan 2014/03/24 12:45:54: v8/test/mjsunit and v8/benchmarks did not hit the
 | 2036 } |
2032 } else { | 2037 } else { |
2033 LoadRelocated(temp, Operand(reinterpret_cast<intptr_t>(target), rmode)); | 2038 LoadRelocated(temp, Operand(reinterpret_cast<intptr_t>(target), rmode)); |
2034 } | 2039 } |
2035 Blr(temp); | 2040 Blr(temp); |
2036 #ifdef DEBUG | 2041 #ifdef DEBUG |
2037 AssertSizeOfCodeGeneratedSince(&start_call, CallSize(target, rmode)); | 2042 AssertSizeOfCodeGeneratedSince(&start_call, CallSize(target, rmode)); |
2038 #endif | 2043 #endif |
2039 } | 2044 } |
2040 | 2045 |
2041 | 2046 |
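Aside (not part of the patch): under !predictable_code_size() the NEW code skips a movk whenever the corresponding 16-bit halfword of the target address is zero, so the loaded-address call sequence can shrink by one or two instructions. A minimal standalone sketch of that decision, assuming a 48-bit address; EmitMovz/EmitMovk/EmitAddressLoad are made-up helpers for illustration, not the real Assembler API:

```cpp
#include <cstdint>
#include <cstdio>

// Illustrative stand-ins for movz/movk: they only print what a real assembler
// would encode. These are NOT the V8 Assembler methods.
static void EmitMovz(uint64_t halfword, int shift) {
  std::printf("movz temp, #0x%04llx, lsl #%d\n",
              static_cast<unsigned long long>(halfword), shift);
}

static void EmitMovk(uint64_t halfword, int shift) {
  std::printf("movk temp, #0x%04llx, lsl #%d\n",
              static_cast<unsigned long long>(halfword), shift);
}

// Same decision as the NEW code: with predictable code size, always emit the
// fixed movz/movk/movk sequence; otherwise drop a movk whose halfword is zero.
// Bits 48..63 are assumed zero for a 48-bit address (see the review comment
// suggesting ASSERT_EQ(0, ((imm >> 48) & 0xffff))).
static int EmitAddressLoad(uint64_t imm, bool predictable_code_size) {
  int count = 1;
  EmitMovz((imm >> 0) & 0xffff, 0);
  if (predictable_code_size) {
    EmitMovk((imm >> 16) & 0xffff, 16);
    EmitMovk((imm >> 32) & 0xffff, 32);
    count += 2;
  } else {
    if (((imm >> 16) & 0xffff) != 0) { EmitMovk((imm >> 16) & 0xffff, 16); ++count; }
    if (((imm >> 32) & 0xffff) != 0) { EmitMovk((imm >> 32) & 0xffff, 32); ++count; }
  }
  return count;
}

int main() {
  // Bits 16..31 of this example address are zero, so the variable-length path
  // emits two instructions instead of three.
  std::printf("emitted %d instruction(s)\n",
              EmitAddressLoad(0x0000123400005678ULL, false));
  return 0;
}
```

The predictable_code_size() path keeps the fixed three-instruction load precisely because the shorter sequence's length depends on the address value.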
(...skipping 26 matching lines...) | |
2068 } | 2073 } |
2069 | 2074 |
2070 | 2075 |
2071 int MacroAssembler::CallSize(Label* target) { | 2076 int MacroAssembler::CallSize(Label* target) { |
2072 USE(target); | 2077 USE(target); |
2073 return kInstructionSize; | 2078 return kInstructionSize; |
2074 } | 2079 } |
2075 | 2080 |
2076 | 2081 |
2077 int MacroAssembler::CallSize(Address target, RelocInfo::Mode rmode) { | 2082 int MacroAssembler::CallSize(Address target, RelocInfo::Mode rmode) { |
2078 USE(target); | |
2079 | |
2080 // Addresses always have 64 bits, so we shouldn't encounter NONE32. | 2083 // Addresses always have 64 bits, so we shouldn't encounter NONE32. |
2081 ASSERT(rmode != RelocInfo::NONE32); | 2084 ASSERT(rmode != RelocInfo::NONE32); |
2082 | 2085 |
2083 if (rmode == RelocInfo::NONE64) { | 2086 if (rmode == RelocInfo::NONE64) { |
2084 return kCallSizeWithoutRelocation; | 2087 int result = kMaxCallSizeWithoutRelocation; |
 | 2088 if (!predictable_code_size()) { |
 | 2089 uint64_t imm = reinterpret_cast<uint64_t>(target); |
 | 2090 if (((imm >> 16) & 0xffff) == 0) result -= kInstructionSize; |
 | 2091 if (((imm >> 32) & 0xffff) == 0) result -= kInstructionSize; |
 | 2092 } |
 | 2093 return result; |
2085 } else { | 2094 } else { |
2086 return kCallSizeWithRelocation; | 2095 return kCallSizeWithRelocation; |
2087 } | 2096 } |
2088 } | 2097 } |
2089 | 2098 |
2090 | 2099 |
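Aside (illustrative only): the size returned by CallSize has to match what the emission path above actually generates, otherwise the DEBUG AssertSizeOfCodeGeneratedSince check after Blr would trip. A hedged sketch of that size computation with placeholder names (kInsnSize, kMaxCallSizeNoReloc, CallSizeNoReloc are hypothetical, not the V8 constants):

```cpp
#include <cassert>
#include <cstdint>

// Hypothetical constants; the real code uses kInstructionSize and
// kMaxCallSizeWithoutRelocation from the V8 arm64 port.
constexpr int kInsnSize = 4;
constexpr int kMaxCallSizeNoReloc = 4 * kInsnSize;  // movz + 2x movk + blr.

// Mirrors the NEW CallSize(Address, NONE64) computation: when code size need
// not be predictable, subtract one instruction per zero halfword that the
// emission path skips.
int CallSizeNoReloc(uint64_t imm, bool predictable_code_size) {
  int result = kMaxCallSizeNoReloc;
  if (!predictable_code_size) {
    if (((imm >> 16) & 0xffff) == 0) result -= kInsnSize;
    if (((imm >> 32) & 0xffff) == 0) result -= kInsnSize;
  }
  return result;
}

int main() {
  // Bits 16..31 zero and code size unconstrained: one movk is dropped.
  assert(CallSizeNoReloc(0x0000123400005678ULL, false) == 3 * kInsnSize);
  // Under predictable code size the full fixed-length sequence is counted.
  assert(CallSizeNoReloc(0x0000123400005678ULL, true) == 4 * kInsnSize);
  return 0;
}
```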
2091 int MacroAssembler::CallSize(Handle<Code> code, | 2100 int MacroAssembler::CallSize(Handle<Code> code, |
2092 RelocInfo::Mode rmode, | 2101 RelocInfo::Mode rmode, |
2093 TypeFeedbackId ast_id) { | 2102 TypeFeedbackId ast_id) { |
2094 USE(code); | |
2095 USE(ast_id); | 2103 USE(ast_id); |
2096 | 2104 AllowDeferredHandleDereference embedding_raw_address; |
2097 // Addresses always have 64 bits, so we shouldn't encounter NONE32. | 2105 return CallSize(reinterpret_cast<Address>(code.location()), rmode); |
2098 ASSERT(rmode != RelocInfo::NONE32); | |
2099 | |
2100 if (rmode == RelocInfo::NONE64) { | |
2101 return kCallSizeWithoutRelocation; | |
2102 } else { | |
2103 return kCallSizeWithRelocation; | |
2104 } | |
2105 } | 2106 } |
2106 | 2107 |
2107 | 2108 |
2108 | 2109 |
2109 | 2110 |
2110 | 2111 |
2111 void MacroAssembler::JumpForHeapNumber(Register object, | 2112 void MacroAssembler::JumpForHeapNumber(Register object, |
2112 Register heap_number_map, | 2113 Register heap_number_map, |
2113 Label* on_heap_number, | 2114 Label* on_heap_number, |
2114 Label* on_not_heap_number) { | 2115 Label* on_not_heap_number) { |
(...skipping 3047 matching lines...) | |
5162 } | 5163 } |
5163 } | 5164 } |
5164 | 5165 |
5165 | 5166 |
5166 #undef __ | 5167 #undef __ |
5167 | 5168 |
5168 | 5169 |
5169 } } // namespace v8::internal | 5170 } } // namespace v8::internal |
5170 | 5171 |
5171 #endif // V8_TARGET_ARCH_ARM64 | 5172 #endif // V8_TARGET_ARCH_ARM64 |