| OLD | NEW |
| 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
| 6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
| 7 | 7 |
| 8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
| 9 #include "vm/code_patcher.h" | 9 #include "vm/code_patcher.h" |
| 10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
| 11 #include "vm/dart_entry.h" | 11 #include "vm/dart_entry.h" |
| 12 #include "vm/flow_graph_compiler.h" | 12 #include "vm/flow_graph_compiler.h" |
| 13 #include "vm/instructions.h" | 13 #include "vm/instructions.h" |
| 14 #include "vm/object.h" | 14 #include "vm/object.h" |
| 15 #include "vm/raw_object.h" | 15 #include "vm/raw_object.h" |
| 16 | 16 |
| 17 namespace dart { | 17 namespace dart { |
| 18 | 18 |
| 19 // The expected pattern of a Dart unoptimized call (static and instance): | 19 |
| 20 // 0: 49 8b 9f imm32 mov RBX, [PP + off] | 20 static bool MatchesPattern(uword addr, int16_t* pattern, intptr_t size) { |
| 21 // 7: 41 ff 97 imm32 call [PP + off] | 21 uint8_t* bytes = reinterpret_cast<uint8_t*>(addr); |
| 22 // 14 <- return address | 22 for (intptr_t i = 0; i < size; i++) { |
| 23 int16_t val = pattern[i]; |
| 24 if ((val >= 0) && (val != bytes[i])) { |
| 25 return false; |
| 26 } |
| 27 } |
| 28 return true; |
| 29 } |
| 30 |
| 31 |
| 32 intptr_t IndexFromPPLoad(uword start) { |
| 33 int32_t offset = *reinterpret_cast<int32_t*>(start); |
| 34 return ObjectPool::IndexFromOffset(offset); |
| 35 } |
| 36 |
| 37 |
| 23 class UnoptimizedCall : public ValueObject { | 38 class UnoptimizedCall : public ValueObject { |
| 24 public: | 39 public: |
| 25 UnoptimizedCall(uword return_address, const Code& code) | 40 UnoptimizedCall(uword return_address, const Code& code) |
| 26 : object_pool_(ObjectPool::Handle(code.GetObjectPool())), | 41 : object_pool_(ObjectPool::Handle(code.GetObjectPool())), |
| 27 start_(return_address - kCallPatternSize) { | 42 start_(return_address - kCallPatternSize) { |
| 28 ASSERT(IsValid(return_address)); | |
| 29 ASSERT((kCallPatternSize - 7) == Assembler::kCallExternalLabelSize); | 43 ASSERT((kCallPatternSize - 7) == Assembler::kCallExternalLabelSize); |
| 44 ASSERT(IsValid()); |
| 30 } | 45 } |
| 31 | 46 |
| 32 static const int kCallPatternSize = 14; | 47 static const int kCallPatternSize = 22; |
| 33 | 48 |
| 34 static bool IsValid(uword return_address) { | 49 bool IsValid() const { |
| 35 uint8_t* code_bytes = | 50 static int16_t pattern[kCallPatternSize] = { |
| 36 reinterpret_cast<uint8_t*>(return_address - kCallPatternSize); | 51 0x49, 0x8b, 0x9f, -1, -1, -1, -1, // movq RBX, [PP + offs] |
| 37 return (code_bytes[0] == 0x49) && (code_bytes[1] == 0x8B) && | 52 0x4d, 0x8b, 0xa7, -1, -1, -1, -1, // movq CR, [PP + offs] |
| 38 (code_bytes[2] == 0x9F) && | 53 0x4d, 0x8b, 0x5c, 0x24, 0x07, // movq TMP, [CR + entry_point_offs] |
| 39 (code_bytes[7] == 0x41) && (code_bytes[8] == 0xFF) && | 54 0x41, 0xff, 0xd3 // callq TMP |
| 40 (code_bytes[9] == 0x97); | 55 }; |
| 56 return MatchesPattern(start_, pattern, kCallPatternSize); |
| 41 } | 57 } |
| 42 | 58 |
| 43 intptr_t argument_index() const { | 59 intptr_t argument_index() const { |
| 44 return IndexFromPPLoad(start_ + 3); | 60 return IndexFromPPLoad(start_ + 3); |
| 45 } | 61 } |
| 46 | 62 |
| 47 RawObject* ic_data() const { | 63 RawObject* ic_data() const { |
| 48 return object_pool_.ObjectAt(argument_index()); | 64 return object_pool_.ObjectAt(argument_index()); |
| 49 } | 65 } |
| 50 | 66 |
| 51 uword target() const { | 67 RawCode* target() const { |
| 52 intptr_t index = IndexFromPPLoad(start_ + 10); | 68 intptr_t index = IndexFromPPLoad(start_ + 10); |
| 53 return object_pool_.RawValueAt(index); | 69 Code& code = Code::Handle(); |
| 70 code ^= object_pool_.ObjectAt(index); |
| 71 return code.raw(); |
| 54 } | 72 } |
| 55 | 73 |
| 56 void set_target(uword target) const { | 74 void set_target(const Code& target) const { |
| 57 intptr_t index = IndexFromPPLoad(start_ + 10); | 75 intptr_t index = IndexFromPPLoad(start_ + 10); |
| 58 object_pool_.SetRawValueAt(index, target); | 76 object_pool_.SetObjectAt(index, target); |
| 59 // No need to flush the instruction cache, since the code is not modified. | 77 // No need to flush the instruction cache, since the code is not modified. |
| 60 } | 78 } |
| 61 | 79 |
| 62 protected: | 80 protected: |
| 63 const ObjectPool& object_pool_; | 81 const ObjectPool& object_pool_; |
| 64 | 82 |
| 65 private: | 83 private: |
| 66 uword start_; | 84 uword start_; |
| 67 DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedCall); | 85 DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedCall); |
| 68 }; | 86 }; |
| (...skipping 46 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 115 ASSERT(test_ic_data.NumArgsTested() >= 0); | 133 ASSERT(test_ic_data.NumArgsTested() >= 0); |
| 116 #endif // DEBUG | 134 #endif // DEBUG |
| 117 } | 135 } |
| 118 | 136 |
| 119 private: | 137 private: |
| 120 DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedStaticCall); | 138 DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedStaticCall); |
| 121 }; | 139 }; |
| 122 | 140 |
| 123 | 141 |
| 124 // The expected pattern of a call where the target is loaded from | 142 // The expected pattern of a call where the target is loaded from |
| 125 // the object pool: | 143 // the object pool. |
| 126 // 0: 41 ff 97 imm32 call [PP + off] | |
| 127 // 7: <- return address | |
| 128 class PoolPointerCall : public ValueObject { | 144 class PoolPointerCall : public ValueObject { |
| 129 public: | 145 public: |
| 130 explicit PoolPointerCall(uword return_address, const Code& code) | 146 explicit PoolPointerCall(uword return_address, const Code& code) |
| 131 : start_(return_address - kCallPatternSize), | 147 : start_(return_address - kCallPatternSize), |
| 132 object_pool_(ObjectPool::Handle(code.GetObjectPool())) { | 148 object_pool_(ObjectPool::Handle(code.GetObjectPool())) { |
| 133 ASSERT(IsValid(return_address)); | 149 ASSERT(IsValid()); |
| 134 } | 150 } |
| 135 | 151 |
| 136 static const int kCallPatternSize = 7; | 152 static const int kCallPatternSize = 15; |
| 137 | 153 |
| 138 static bool IsValid(uword return_address) { | 154 bool IsValid() const { |
| 139 uint8_t* code_bytes = | 155 static int16_t pattern[kCallPatternSize] = { |
| 140 reinterpret_cast<uint8_t*>(return_address - kCallPatternSize); | 156 0x4d, 0x8b, 0xa7, -1, -1, -1, -1, // movq CR, [PP + offs] |
| 141 return (code_bytes[0] == 0x41) && (code_bytes[1] == 0xFF) && | 157 0x4d, 0x8b, 0x5c, 0x24, 0x07, // movq TMP, [CR + entry_point_offs] |
| 142 (code_bytes[2] == 0x97); | 158 0x41, 0xff, 0xd3 // callq TMP |
| 159 }; |
| 160 return MatchesPattern(start_, pattern, kCallPatternSize); |
| 143 } | 161 } |
| 144 | 162 |
| 145 intptr_t pp_index() const { | 163 intptr_t pp_index() const { |
| 146 return IndexFromPPLoad(start_ + 3); | 164 return IndexFromPPLoad(start_ + 3); |
| 147 } | 165 } |
| 148 | 166 |
| 149 uword Target() const { | 167 RawCode* Target() const { |
| 150 return object_pool_.RawValueAt(pp_index()); | 168 Code& code = Code::Handle(); |
| 169 code ^= object_pool_.ObjectAt(pp_index()); |
| 170 return code.raw(); |
| 151 } | 171 } |
| 152 | 172 |
| 153 void SetTarget(uword target) const { | 173 void SetTarget(const Code& target) const { |
| 154 object_pool_.SetRawValueAt(pp_index(), target); | 174 object_pool_.SetObjectAt(pp_index(), target); |
| 155 // No need to flush the instruction cache, since the code is not modified. | 175 // No need to flush the instruction cache, since the code is not modified. |
| 156 } | 176 } |
| 157 | 177 |
| 158 protected: | 178 protected: |
| 159 uword start_; | 179 uword start_; |
| 160 const ObjectPool& object_pool_; | 180 const ObjectPool& object_pool_; |
| 161 | 181 |
| 162 private: | 182 private: |
| 163 DISALLOW_IMPLICIT_CONSTRUCTORS(PoolPointerCall); | 183 DISALLOW_IMPLICIT_CONSTRUCTORS(PoolPointerCall); |
| 164 }; | 184 }; |
| 165 | 185 |
| 166 | 186 |
| 167 uword CodePatcher::GetStaticCallTargetAt(uword return_address, | 187 RawCode* CodePatcher::GetStaticCallTargetAt(uword return_address, |
| 168 const Code& code) { | 188 const Code& code) { |
| 169 ASSERT(code.ContainsInstructionAt(return_address)); | 189 ASSERT(code.ContainsInstructionAt(return_address)); |
| 170 PoolPointerCall call(return_address, code); | 190 PoolPointerCall call(return_address, code); |
| 171 return call.Target(); | 191 return call.Target(); |
| 172 } | 192 } |
| 173 | 193 |
| 174 | 194 |
| 175 void CodePatcher::PatchStaticCallAt(uword return_address, | 195 void CodePatcher::PatchStaticCallAt(uword return_address, |
| 176 const Code& code, | 196 const Code& code, |
| 177 uword new_target) { | 197 const Code& new_target) { |
| 178 PatchPoolPointerCallAt(return_address, code, new_target); | 198 PatchPoolPointerCallAt(return_address, code, new_target); |
| 179 } | 199 } |
| 180 | 200 |
| 181 | 201 |
| 182 void CodePatcher::PatchPoolPointerCallAt(uword return_address, | 202 void CodePatcher::PatchPoolPointerCallAt(uword return_address, |
| 183 const Code& code, | 203 const Code& code, |
| 184 uword new_target) { | 204 const Code& new_target) { |
| 185 ASSERT(code.ContainsInstructionAt(return_address)); | 205 ASSERT(code.ContainsInstructionAt(return_address)); |
| 186 PoolPointerCall call(return_address, code); | 206 PoolPointerCall call(return_address, code); |
| 187 call.SetTarget(new_target); | 207 call.SetTarget(new_target); |
| 188 } | 208 } |
| 189 | 209 |
| 190 | 210 |
| 191 void CodePatcher::PatchInstanceCallAt(uword return_address, | 211 RawCode* CodePatcher::GetInstanceCallAt(uword return_address, |
| 192 const Code& code, | 212 const Code& code, |
| 193 uword new_target) { | 213 ICData* ic_data) { |
| 194 ASSERT(code.ContainsInstructionAt(return_address)); | |
| 195 InstanceCall call(return_address, code); | |
| 196 call.set_target(new_target); | |
| 197 } | |
| 198 | |
| 199 | |
| 200 uword CodePatcher::GetInstanceCallAt(uword return_address, | |
| 201 const Code& code, | |
| 202 ICData* ic_data) { | |
| 203 ASSERT(code.ContainsInstructionAt(return_address)); | 214 ASSERT(code.ContainsInstructionAt(return_address)); |
| 204 InstanceCall call(return_address, code); | 215 InstanceCall call(return_address, code); |
| 205 if (ic_data != NULL) { | 216 if (ic_data != NULL) { |
| 206 *ic_data ^= call.ic_data(); | 217 *ic_data ^= call.ic_data(); |
| 207 } | 218 } |
| 208 return call.target(); | 219 return call.target(); |
| 209 } | 220 } |
| 210 | 221 |
| 211 | 222 |
| 212 intptr_t CodePatcher::InstanceCallSizeInBytes() { | 223 intptr_t CodePatcher::InstanceCallSizeInBytes() { |
| 213 return InstanceCall::kCallPatternSize; | 224 return InstanceCall::kCallPatternSize; |
| 214 } | 225 } |
| 215 | 226 |
| 216 | 227 |
| 217 void CodePatcher::InsertCallAt(uword start, uword target) { | 228 void CodePatcher::InsertDeoptimizationCallAt(uword start, uword target) { |
| 218 // The inserted call should not overlap the lazy deopt jump code. | 229 // The inserted call should not overlap the lazy deopt jump code. |
| 219 ASSERT(start + ShortCallPattern::pattern_length_in_bytes() <= target); | 230 ASSERT(start + ShortCallPattern::pattern_length_in_bytes() <= target); |
| 220 *reinterpret_cast<uint8_t*>(start) = 0xE8; | 231 *reinterpret_cast<uint8_t*>(start) = 0xE8; |
| 221 ShortCallPattern call(start); | 232 ShortCallPattern call(start); |
| 222 call.SetTargetAddress(target); | 233 call.SetTargetAddress(target); |
| 223 CPU::FlushICache(start, ShortCallPattern::pattern_length_in_bytes()); | 234 CPU::FlushICache(start, ShortCallPattern::pattern_length_in_bytes()); |
| 224 } | 235 } |
| 225 | 236 |
| 226 | 237 |
| 227 RawFunction* CodePatcher::GetUnoptimizedStaticCallAt( | 238 RawFunction* CodePatcher::GetUnoptimizedStaticCallAt( |
| 228 uword return_address, const Code& code, ICData* ic_data_result) { | 239 uword return_address, const Code& code, ICData* ic_data_result) { |
| 229 ASSERT(code.ContainsInstructionAt(return_address)); | 240 ASSERT(code.ContainsInstructionAt(return_address)); |
| 230 UnoptimizedStaticCall static_call(return_address, code); | 241 UnoptimizedStaticCall static_call(return_address, code); |
| 231 ICData& ic_data = ICData::Handle(); | 242 ICData& ic_data = ICData::Handle(); |
| 232 ic_data ^= static_call.ic_data(); | 243 ic_data ^= static_call.ic_data(); |
| 233 if (ic_data_result != NULL) { | 244 if (ic_data_result != NULL) { |
| 234 *ic_data_result = ic_data.raw(); | 245 *ic_data_result = ic_data.raw(); |
| 235 } | 246 } |
| 236 return ic_data.GetTargetAt(0); | 247 return ic_data.GetTargetAt(0); |
| 237 } | 248 } |
| 238 | 249 |
| 239 | 250 |
| 240 void CodePatcher::PatchNativeCallAt(uword return_address, | 251 void CodePatcher::PatchNativeCallAt(uword return_address, |
| 241 const Code& code, | 252 const Code& code, |
| 242 NativeFunction target, | 253 NativeFunction target, |
| 243 const Code& trampoline) { | 254 const Code& trampoline) { |
| 244 ASSERT(code.ContainsInstructionAt(return_address)); | 255 ASSERT(code.ContainsInstructionAt(return_address)); |
| 245 NativeCall call(return_address, code); | 256 NativeCall call(return_address, code); |
| 246 call.set_target(trampoline.EntryPoint()); | 257 call.set_target(trampoline); |
| 247 call.set_native_function(target); | 258 call.set_native_function(target); |
| 248 } | 259 } |
| 249 | 260 |
| 250 | 261 |
| 251 uword CodePatcher::GetNativeCallAt(uword return_address, | 262 RawCode* CodePatcher::GetNativeCallAt(uword return_address, |
| 252 const Code& code, | 263 const Code& code, |
| 253 NativeFunction* target) { | 264 NativeFunction* target) { |
| 254 ASSERT(code.ContainsInstructionAt(return_address)); | 265 ASSERT(code.ContainsInstructionAt(return_address)); |
| 255 NativeCall call(return_address, code); | 266 NativeCall call(return_address, code); |
| 256 *target = call.native_function(); | 267 *target = call.native_function(); |
| 257 return call.target(); | 268 return call.target(); |
| 258 } | 269 } |
| 259 | 270 |
| 260 | 271 |
| 261 // The expected code pattern of an edge counter in unoptimized code: | 272 // The expected code pattern of an edge counter in unoptimized code: |
| 262 // 49 8b 87 imm32 mov RAX, [PP + offset] | 273 // 49 8b 87 imm32 mov RAX, [PP + offset] |
| 263 class EdgeCounter : public ValueObject { | 274 class EdgeCounter : public ValueObject { |
| (...skipping 21 matching lines...) Expand all Loading... |
| 285 | 296 |
| 286 RawObject* CodePatcher::GetEdgeCounterAt(uword pc, const Code& code) { | 297 RawObject* CodePatcher::GetEdgeCounterAt(uword pc, const Code& code) { |
| 287 ASSERT(code.ContainsInstructionAt(pc)); | 298 ASSERT(code.ContainsInstructionAt(pc)); |
| 288 EdgeCounter counter(pc, code); | 299 EdgeCounter counter(pc, code); |
| 289 return counter.edge_counter(); | 300 return counter.edge_counter(); |
| 290 } | 301 } |
| 291 | 302 |
| 292 } // namespace dart | 303 } // namespace dart |
| 293 | 304 |
| 294 #endif // defined TARGET_ARCH_X64 | 305 #endif // defined TARGET_ARCH_X64 |
| OLD | NEW |