| OLD | NEW |
| 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. |
| 6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
| 7 | 7 |
| 8 #include "vm/code_patcher.h" | 8 #include "vm/code_patcher.h" |
| 9 #include "vm/cpu.h" | 9 #include "vm/cpu.h" |
| 10 #include "vm/instructions.h" | 10 #include "vm/instructions.h" |
| 11 #include "vm/object.h" | 11 #include "vm/object.h" |
| 12 | 12 |
| 13 namespace dart { | 13 namespace dart { |
| 14 | 14 |
| 15 void CodePatcher::PatchInstanceCallAt(uword return_address, | |
| 16 const Code& code, | |
| 17 uword new_target) { | |
| 18 ASSERT(code.ContainsInstructionAt(return_address)); | |
| 19 CallPattern call(return_address, code); | |
| 20 call.SetTargetAddress(new_target); | |
| 21 } | |
| 22 | |
| 23 | |
| 24 class PoolPointerCall : public ValueObject { | 15 class PoolPointerCall : public ValueObject { |
| 25 public: | 16 public: |
| 26 PoolPointerCall(uword pc, const Code& code) | 17 PoolPointerCall(uword pc, const Code& code) |
| 27 : end_(pc), | 18 : end_(pc), |
| 28 object_pool_(ObjectPool::Handle(code.GetObjectPool())) { | 19 object_pool_(ObjectPool::Handle(code.GetObjectPool())) { |
| 29 // Last instruction: blr ip0. | 20 // Last instruction: blr ip0. |
| 30 ASSERT(*(reinterpret_cast<uint32_t*>(end_) - 1) == 0xd63f0200); | 21 ASSERT(*(reinterpret_cast<uint32_t*>(end_) - 1) == 0xd63f0200); |
| 31 InstructionPattern::DecodeLoadWordFromPool( | 22 InstructionPattern::DecodeLoadWordFromPool( |
| 32 end_ - Instr::kInstrSize, &reg_, &index_); | 23 end_ - 2 * Instr::kInstrSize, &reg_, &index_); |
| 33 } | 24 } |
| 34 | 25 |
| 35 intptr_t pp_index() const { | 26 intptr_t pp_index() const { |
| 36 return index_; | 27 return index_; |
| 37 } | 28 } |
| 38 | 29 |
| 39 uword Target() const { | 30 RawCode* Target() const { |
| 40 return object_pool_.RawValueAt(pp_index()); | 31 return reinterpret_cast<RawCode*>(object_pool_.ObjectAt(pp_index())); |
| 41 } | 32 } |
| 42 | 33 |
| 43 void SetTarget(uword target) const { | 34 void SetTarget(const Code& target) const { |
| 44 object_pool_.SetRawValueAt(pp_index(), target); | 35 object_pool_.SetObjectAt(pp_index(), target); |
| 45 // No need to flush the instruction cache, since the code is not modified. | 36 // No need to flush the instruction cache, since the code is not modified. |
| 46 } | 37 } |
| 47 | 38 |
| 48 private: | 39 private: |
| 49 static const int kCallPatternSize = 3 * Instr::kInstrSize; | 40 static const int kCallPatternSize = 3 * Instr::kInstrSize; |
| 50 uword end_; | 41 uword end_; |
| 51 const ObjectPool& object_pool_; | 42 const ObjectPool& object_pool_; |
| 52 Register reg_; | 43 Register reg_; |
| 53 intptr_t index_; | 44 intptr_t index_; |
| 54 DISALLOW_IMPLICIT_CONSTRUCTORS(PoolPointerCall); | 45 DISALLOW_IMPLICIT_CONSTRUCTORS(PoolPointerCall); |
| 55 }; | 46 }; |
| 56 | 47 |
| 57 | 48 |
| 58 uword CodePatcher::GetStaticCallTargetAt(uword return_address, | 49 RawCode* CodePatcher::GetStaticCallTargetAt(uword return_address, |
| 59 const Code& code) { | 50 const Code& code) { |
| 60 ASSERT(code.ContainsInstructionAt(return_address)); | 51 ASSERT(code.ContainsInstructionAt(return_address)); |
| 61 PoolPointerCall call(return_address, code); | 52 PoolPointerCall call(return_address, code); |
| 62 return call.Target(); | 53 return call.Target(); |
| 63 } | 54 } |
| 64 | 55 |
| 65 | 56 |
| 66 void CodePatcher::PatchStaticCallAt(uword return_address, | 57 void CodePatcher::PatchStaticCallAt(uword return_address, |
| 67 const Code& code, | 58 const Code& code, |
| 68 uword new_target) { | 59 const Code& new_target) { |
| 69 PatchPoolPointerCallAt(return_address, code, new_target); | 60 PatchPoolPointerCallAt(return_address, code, new_target); |
| 70 } | 61 } |
| 71 | 62 |
| 72 | 63 |
| 73 void CodePatcher::PatchPoolPointerCallAt(uword return_address, | 64 void CodePatcher::PatchPoolPointerCallAt(uword return_address, |
| 74 const Code& code, | 65 const Code& code, |
| 75 uword new_target) { | 66 const Code& new_target) { |
| 76 ASSERT(code.ContainsInstructionAt(return_address)); | 67 ASSERT(code.ContainsInstructionAt(return_address)); |
| 77 PoolPointerCall call(return_address, code); | 68 PoolPointerCall call(return_address, code); |
| 78 call.SetTarget(new_target); | 69 call.SetTarget(new_target); |
| 79 } | 70 } |
| 80 | 71 |
| 81 | 72 |
| 82 void CodePatcher::InsertCallAt(uword start, uword target) { | 73 void CodePatcher::InsertDeoptimizationCallAt(uword start, uword target) { |
| 83 // The inserted call should not overlap the lazy deopt jump code. | 74 // The inserted call should not overlap the lazy deopt jump code. |
| 84 ASSERT(start + CallPattern::kLengthInBytes <= target); | 75 ASSERT(start + CallPattern::kDeoptCallLengthInBytes <= target); |
| 85 CallPattern::InsertAt(start, target); | 76 CallPattern::InsertDeoptCallAt(start, target); |
| 86 } | 77 } |
| 87 | 78 |
| 88 | 79 |
| 89 uword CodePatcher::GetInstanceCallAt(uword return_address, | 80 RawCode* CodePatcher::GetInstanceCallAt(uword return_address, |
| 90 const Code& code, | 81 const Code& code, |
| 91 ICData* ic_data) { | 82 ICData* ic_data) { |
| 92 ASSERT(code.ContainsInstructionAt(return_address)); | 83 ASSERT(code.ContainsInstructionAt(return_address)); |
| 93 CallPattern call(return_address, code); | 84 CallPattern call(return_address, code); |
| 94 if (ic_data != NULL) { | 85 if (ic_data != NULL) { |
| 95 *ic_data = call.IcData(); | 86 *ic_data = call.IcData(); |
| 96 } | 87 } |
| 97 return call.TargetAddress(); | 88 return call.TargetCode(); |
| 98 } | 89 } |
| 99 | 90 |
| 100 | 91 |
| 101 intptr_t CodePatcher::InstanceCallSizeInBytes() { | 92 intptr_t CodePatcher::InstanceCallSizeInBytes() { |
| 102 // The instance call instruction sequence has a variable size on ARM64. | 93 // The instance call instruction sequence has a variable size on ARM64. |
| 103 UNREACHABLE(); | 94 UNREACHABLE(); |
| 104 return 0; | 95 return 0; |
| 105 } | 96 } |
| 106 | 97 |
| 107 | 98 |
| 108 RawFunction* CodePatcher::GetUnoptimizedStaticCallAt( | 99 RawFunction* CodePatcher::GetUnoptimizedStaticCallAt( |
| 109 uword return_address, const Code& code, ICData* ic_data_result) { | 100 uword return_address, const Code& code, ICData* ic_data_result) { |
| 110 ASSERT(code.ContainsInstructionAt(return_address)); | 101 ASSERT(code.ContainsInstructionAt(return_address)); |
| 111 CallPattern static_call(return_address, code); | 102 CallPattern static_call(return_address, code); |
| 112 ICData& ic_data = ICData::Handle(); | 103 ICData& ic_data = ICData::Handle(); |
| 113 ic_data ^= static_call.IcData(); | 104 ic_data ^= static_call.IcData(); |
| 114 if (ic_data_result != NULL) { | 105 if (ic_data_result != NULL) { |
| 115 *ic_data_result = ic_data.raw(); | 106 *ic_data_result = ic_data.raw(); |
| 116 } | 107 } |
| 117 return ic_data.GetTargetAt(0); | 108 return ic_data.GetTargetAt(0); |
| 118 } | 109 } |
| 119 | 110 |
| 120 | 111 |
| 121 void CodePatcher::PatchNativeCallAt(uword return_address, | 112 void CodePatcher::PatchNativeCallAt(uword return_address, |
| 122 const Code& code, | 113 const Code& code, |
| 123 NativeFunction target, | 114 NativeFunction target, |
| 124 const Code& trampoline) { | 115 const Code& trampoline) { |
| 125 ASSERT(code.ContainsInstructionAt(return_address)); | 116 ASSERT(code.ContainsInstructionAt(return_address)); |
| 126 NativeCallPattern call(return_address, code); | 117 NativeCallPattern call(return_address, code); |
| 127 call.set_target(trampoline.EntryPoint()); | 118 call.set_target(trampoline); |
| 128 call.set_native_function(target); | 119 call.set_native_function(target); |
| 129 } | 120 } |
| 130 | 121 |
| 131 | 122 |
| 132 uword CodePatcher::GetNativeCallAt(uword return_address, | 123 RawCode* CodePatcher::GetNativeCallAt(uword return_address, |
| 133 const Code& code, | 124 const Code& code, |
| 134 NativeFunction* target) { | 125 NativeFunction* target) { |
| 135 ASSERT(code.ContainsInstructionAt(return_address)); | 126 ASSERT(code.ContainsInstructionAt(return_address)); |
| 136 NativeCallPattern call(return_address, code); | 127 NativeCallPattern call(return_address, code); |
| 137 *target = call.native_function(); | 128 *target = call.native_function(); |
| 138 return call.target(); | 129 return call.target(); |
| 139 } | 130 } |
| 140 | 131 |
| 141 | 132 |
| 142 // This class pattern matches on a load from the object pool. Loading on | 133 // This class pattern matches on a load from the object pool. Loading on |
| 143 // ARM64 is complicated because it can take more than one form. We | 134 // ARM64 is complicated because it can take more than one form. We |
| 144 // match backwards from the end of the sequence so we can reuse the code for | 135 // match backwards from the end of the sequence so we can reuse the code for |
| (...skipping 31 matching lines...) | |
| 176 | 167 |
| 177 RawObject* CodePatcher::GetEdgeCounterAt(uword pc, const Code& code) { | 168 RawObject* CodePatcher::GetEdgeCounterAt(uword pc, const Code& code) { |
| 178 ASSERT(code.ContainsInstructionAt(pc)); | 169 ASSERT(code.ContainsInstructionAt(pc)); |
| 179 EdgeCounter counter(pc, code); | 170 EdgeCounter counter(pc, code); |
| 180 return counter.edge_counter(); | 171 return counter.edge_counter(); |
| 181 } | 172 } |
| 182 | 173 |
| 183 } // namespace dart | 174 } // namespace dart |
| 184 | 175 |
| 185 #endif // defined TARGET_ARCH_ARM64 | 176 #endif // defined TARGET_ARCH_ARM64 |