OLD | NEW |
1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_X64. |
6 #if defined(TARGET_ARCH_X64) | 6 #if defined(TARGET_ARCH_X64) |
7 | 7 |
8 #include "vm/assembler.h" | 8 #include "vm/assembler.h" |
9 #include "vm/code_patcher.h" | 9 #include "vm/code_patcher.h" |
10 #include "vm/cpu.h" | 10 #include "vm/cpu.h" |
(...skipping 36 matching lines...) |
47 : object_pool_(ObjectPool::Handle(code.GetObjectPool())), | 47 : object_pool_(ObjectPool::Handle(code.GetObjectPool())), |
48 start_(return_address - kCallPatternSize) { | 48 start_(return_address - kCallPatternSize) { |
49 ASSERT((kCallPatternSize - 7) == Assembler::kCallExternalLabelSize); | 49 ASSERT((kCallPatternSize - 7) == Assembler::kCallExternalLabelSize); |
50 ASSERT(IsValid()); | 50 ASSERT(IsValid()); |
51 } | 51 } |
52 | 52 |
53 static const int kCallPatternSize = 22; | 53 static const int kCallPatternSize = 22; |
54 | 54 |
55 bool IsValid() const { | 55 bool IsValid() const { |
56 static int16_t pattern[kCallPatternSize] = { | 56 static int16_t pattern[kCallPatternSize] = { |
57 0x49, 0x8b, 0x9f, -1, -1, -1, -1, // movq RBX, [PP + offs] | 57 0x49, 0x8b, 0x9f, -1, -1, -1, -1, // movq RBX, [PP + offs] |
58 0x4d, 0x8b, 0xa7, -1, -1, -1, -1, // movq CR, [PP + offs] | 58 0x4d, 0x8b, 0xa7, -1, -1, -1, -1, // movq CR, [PP + offs] |
59 0x4d, 0x8b, 0x5c, 0x24, 0x07, // movq TMP, [CR + entry_point_offs] | 59 0x4d, 0x8b, 0x5c, 0x24, 0x07, // movq TMP, [CR + entry_point_offs] |
60 0x41, 0xff, 0xd3 // callq TMP | 60 0x41, 0xff, 0xd3 // callq TMP |
61 }; | 61 }; |
62 return MatchesPattern(start_, pattern, kCallPatternSize); | 62 return MatchesPattern(start_, pattern, kCallPatternSize); |
63 } | 63 } |
64 | 64 |
65 intptr_t argument_index() const { | 65 intptr_t argument_index() const { return IndexFromPPLoad(start_ + 3); } |
66 return IndexFromPPLoad(start_ + 3); | |
67 } | |
68 | 66 |
69 RawObject* ic_data() const { | 67 RawObject* ic_data() const { return object_pool_.ObjectAt(argument_index()); } |
70 return object_pool_.ObjectAt(argument_index()); | |
71 } | |
72 | 68 |
73 RawCode* target() const { | 69 RawCode* target() const { |
74 intptr_t index = IndexFromPPLoad(start_ + 10); | 70 intptr_t index = IndexFromPPLoad(start_ + 10); |
75 Code& code = Code::Handle(); | 71 Code& code = Code::Handle(); |
76 code ^= object_pool_.ObjectAt(index); | 72 code ^= object_pool_.ObjectAt(index); |
77 return code.raw(); | 73 return code.raw(); |
78 } | 74 } |
79 | 75 |
80 void set_target(const Code& target) const { | 76 void set_target(const Code& target) const { |
81 intptr_t index = IndexFromPPLoad(start_ + 10); | 77 intptr_t index = IndexFromPPLoad(start_ + 10); |
82 object_pool_.SetObjectAt(index, target); | 78 object_pool_.SetObjectAt(index, target); |
83 // No need to flush the instruction cache, since the code is not modified. | 79 // No need to flush the instruction cache, since the code is not modified. |
84 } | 80 } |
85 | 81 |
86 protected: | 82 protected: |
87 const ObjectPool& object_pool_; | 83 const ObjectPool& object_pool_; |
88 | 84 |
89 private: | 85 private: |
90 uword start_; | 86 uword start_; |
91 DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedCall); | 87 DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedCall); |
92 }; | 88 }; |
93 | 89 |
94 | 90 |
95 class NativeCall : public UnoptimizedCall { | 91 class NativeCall : public UnoptimizedCall { |
96 public: | 92 public: |
97 NativeCall(uword return_address, const Code& code) | 93 NativeCall(uword return_address, const Code& code) |
98 : UnoptimizedCall(return_address, code) { | 94 : UnoptimizedCall(return_address, code) {} |
99 } | |
100 | 95 |
101 NativeFunction native_function() const { | 96 NativeFunction native_function() const { |
102 return reinterpret_cast<NativeFunction>( | 97 return reinterpret_cast<NativeFunction>( |
103 object_pool_.RawValueAt(argument_index())); | 98 object_pool_.RawValueAt(argument_index())); |
104 } | 99 } |
105 | 100 |
106 void set_native_function(NativeFunction func) const { | 101 void set_native_function(NativeFunction func) const { |
107 object_pool_.SetRawValueAt(argument_index(), | 102 object_pool_.SetRawValueAt(argument_index(), reinterpret_cast<uword>(func)); |
108 reinterpret_cast<uword>(func)); | |
109 } | 103 } |
110 | 104 |
111 private: | 105 private: |
112 DISALLOW_IMPLICIT_CONSTRUCTORS(NativeCall); | 106 DISALLOW_IMPLICIT_CONSTRUCTORS(NativeCall); |
113 }; | 107 }; |
114 | 108 |
115 | 109 |
116 class InstanceCall : public UnoptimizedCall { | 110 class InstanceCall : public UnoptimizedCall { |
117 public: | 111 public: |
118 InstanceCall(uword return_address, const Code& code) | 112 InstanceCall(uword return_address, const Code& code) |
(...skipping 33 matching lines...) |
152 explicit PoolPointerCall(uword return_address, const Code& code) | 146 explicit PoolPointerCall(uword return_address, const Code& code) |
153 : start_(return_address - kCallPatternSize), | 147 : start_(return_address - kCallPatternSize), |
154 object_pool_(ObjectPool::Handle(code.GetObjectPool())) { | 148 object_pool_(ObjectPool::Handle(code.GetObjectPool())) { |
155 ASSERT(IsValid()); | 149 ASSERT(IsValid()); |
156 } | 150 } |
157 | 151 |
158 static const int kCallPatternSize = 15; | 152 static const int kCallPatternSize = 15; |
159 | 153 |
160 bool IsValid() const { | 154 bool IsValid() const { |
161 static int16_t pattern[kCallPatternSize] = { | 155 static int16_t pattern[kCallPatternSize] = { |
162 0x4d, 0x8b, 0xa7, -1, -1, -1, -1, // movq CR, [PP + offs] | 156 0x4d, 0x8b, 0xa7, -1, -1, -1, -1, // movq CR, [PP + offs] |
163 0x4d, 0x8b, 0x5c, 0x24, 0x07, // movq TMP, [CR + entry_point_off] | 157 0x4d, 0x8b, 0x5c, 0x24, 0x07, // movq TMP, [CR + entry_point_off] |
164 0x41, 0xff, 0xd3 // callq TMP | 158 0x41, 0xff, 0xd3 // callq TMP |
165 }; | 159 }; |
166 return MatchesPattern(start_, pattern, kCallPatternSize); | 160 return MatchesPattern(start_, pattern, kCallPatternSize); |
167 } | 161 } |
168 | 162 |
169 intptr_t pp_index() const { | 163 intptr_t pp_index() const { return IndexFromPPLoad(start_ + 3); } |
170 return IndexFromPPLoad(start_ + 3); | |
171 } | |
172 | 164 |
173 RawCode* Target() const { | 165 RawCode* Target() const { |
174 Code& code = Code::Handle(); | 166 Code& code = Code::Handle(); |
175 code ^= object_pool_.ObjectAt(pp_index()); | 167 code ^= object_pool_.ObjectAt(pp_index()); |
176 return code.raw(); | 168 return code.raw(); |
177 } | 169 } |
178 | 170 |
179 void SetTarget(const Code& target) const { | 171 void SetTarget(const Code& target) const { |
180 object_pool_.SetObjectAt(pp_index(), target); | 172 object_pool_.SetObjectAt(pp_index(), target); |
181 // No need to flush the instruction cache, since the code is not modified. | 173 // No need to flush the instruction cache, since the code is not modified. |
(...skipping 18 matching lines...) |
200 SwitchableCall(uword return_address, const Code& code) | 192 SwitchableCall(uword return_address, const Code& code) |
201 : start_(return_address - kCallPatternSize), | 193 : start_(return_address - kCallPatternSize), |
202 object_pool_(ObjectPool::Handle(code.GetObjectPool())) { | 194 object_pool_(ObjectPool::Handle(code.GetObjectPool())) { |
203 ASSERT(IsValid()); | 195 ASSERT(IsValid()); |
204 } | 196 } |
205 | 197 |
206 static const int kCallPatternSize = 21; | 198 static const int kCallPatternSize = 21; |
207 | 199 |
208 bool IsValid() const { | 200 bool IsValid() const { |
209 static int16_t pattern[kCallPatternSize] = { | 201 static int16_t pattern[kCallPatternSize] = { |
210 0x4d, 0x8b, 0xa7, -1, -1, -1, -1, // movq r12, [PP + code_offs] | 202 0x4d, 0x8b, 0xa7, -1, -1, -1, -1, // movq r12, [PP + code_offs] |
211 0x49, 0x8b, 0x4c, 0x24, 0x0f, // movq rcx, [r12 + entrypoint_off] | 203 0x49, 0x8b, 0x4c, 0x24, 0x0f, // movq rcx, [r12 + entrypoint_off] |
212 0x49, 0x8b, 0x9f, -1, -1, -1, -1, // movq rbx, [PP + cache_offs] | 204 0x49, 0x8b, 0x9f, -1, -1, -1, -1, // movq rbx, [PP + cache_offs] |
213 0xff, 0xd1, // call rcx | 205 0xff, 0xd1, // call rcx |
214 }; | 206 }; |
215 ASSERT(ARRAY_SIZE(pattern) == kCallPatternSize); | 207 ASSERT(ARRAY_SIZE(pattern) == kCallPatternSize); |
216 return MatchesPattern(start_, pattern, kCallPatternSize); | 208 return MatchesPattern(start_, pattern, kCallPatternSize); |
217 } | 209 } |
218 | 210 |
219 intptr_t data_index() const { | 211 intptr_t data_index() const { return IndexFromPPLoad(start_ + 15); } |
220 return IndexFromPPLoad(start_ + 15); | 212 intptr_t target_index() const { return IndexFromPPLoad(start_ + 3); } |
221 } | |
222 intptr_t target_index() const { | |
223 return IndexFromPPLoad(start_ + 3); | |
224 } | |
225 | 213 |
226 RawObject* data() const { | 214 RawObject* data() const { return object_pool_.ObjectAt(data_index()); } |
227 return object_pool_.ObjectAt(data_index()); | |
228 } | |
229 RawCode* target() const { | 215 RawCode* target() const { |
230 return reinterpret_cast<RawCode*>(object_pool_.ObjectAt(target_index())); | 216 return reinterpret_cast<RawCode*>(object_pool_.ObjectAt(target_index())); |
231 } | 217 } |
232 | 218 |
233 void SetData(const Object& data) const { | 219 void SetData(const Object& data) const { |
234 ASSERT(!Object::Handle(object_pool_.ObjectAt(data_index())).IsCode()); | 220 ASSERT(!Object::Handle(object_pool_.ObjectAt(data_index())).IsCode()); |
235 object_pool_.SetObjectAt(data_index(), data); | 221 object_pool_.SetObjectAt(data_index(), data); |
236 // No need to flush the instruction cache, since the code is not modified. | 222 // No need to flush the instruction cache, since the code is not modified. |
237 } | 223 } |
238 | 224 |
239 void SetTarget(const Code& target) const { | 225 void SetTarget(const Code& target) const { |
240 ASSERT(Object::Handle(object_pool_.ObjectAt(target_index())).IsCode()); | 226 ASSERT(Object::Handle(object_pool_.ObjectAt(target_index())).IsCode()); |
241 object_pool_.SetObjectAt(target_index(), target); | 227 object_pool_.SetObjectAt(target_index(), target); |
242 // No need to flush the instruction cache, since the code is not modified. | 228 // No need to flush the instruction cache, since the code is not modified. |
243 } | 229 } |
244 | 230 |
245 protected: | 231 protected: |
246 uword start_; | 232 uword start_; |
247 const ObjectPool& object_pool_; | 233 const ObjectPool& object_pool_; |
248 | 234 |
249 private: | 235 private: |
250 DISALLOW_IMPLICIT_CONSTRUCTORS(SwitchableCall); | 236 DISALLOW_IMPLICIT_CONSTRUCTORS(SwitchableCall); |
251 }; | 237 }; |
252 | 238 |
253 | 239 |
254 | |
255 RawCode* CodePatcher::GetStaticCallTargetAt(uword return_address, | 240 RawCode* CodePatcher::GetStaticCallTargetAt(uword return_address, |
256 const Code& code) { | 241 const Code& code) { |
257 ASSERT(code.ContainsInstructionAt(return_address)); | 242 ASSERT(code.ContainsInstructionAt(return_address)); |
258 PoolPointerCall call(return_address, code); | 243 PoolPointerCall call(return_address, code); |
259 return call.Target(); | 244 return call.Target(); |
260 } | 245 } |
261 | 246 |
262 | 247 |
263 void CodePatcher::PatchStaticCallAt(uword return_address, | 248 void CodePatcher::PatchStaticCallAt(uword return_address, |
264 const Code& code, | 249 const Code& code, |
(...skipping 26 matching lines...) |
291 intptr_t CodePatcher::InstanceCallSizeInBytes() { | 276 intptr_t CodePatcher::InstanceCallSizeInBytes() { |
292 return InstanceCall::kCallPatternSize; | 277 return InstanceCall::kCallPatternSize; |
293 } | 278 } |
294 | 279 |
295 | 280 |
296 void CodePatcher::InsertDeoptimizationCallAt(uword start) { | 281 void CodePatcher::InsertDeoptimizationCallAt(uword start) { |
297 UNREACHABLE(); | 282 UNREACHABLE(); |
298 } | 283 } |
299 | 284 |
300 | 285 |
301 RawFunction* CodePatcher::GetUnoptimizedStaticCallAt( | 286 RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(uword return_address, |
302 uword return_address, const Code& code, ICData* ic_data_result) { | 287 const Code& code, |
| 288 ICData* ic_data_result) { |
303 ASSERT(code.ContainsInstructionAt(return_address)); | 289 ASSERT(code.ContainsInstructionAt(return_address)); |
304 UnoptimizedStaticCall static_call(return_address, code); | 290 UnoptimizedStaticCall static_call(return_address, code); |
305 ICData& ic_data = ICData::Handle(); | 291 ICData& ic_data = ICData::Handle(); |
306 ic_data ^= static_call.ic_data(); | 292 ic_data ^= static_call.ic_data(); |
307 if (ic_data_result != NULL) { | 293 if (ic_data_result != NULL) { |
308 *ic_data_result = ic_data.raw(); | 294 *ic_data_result = ic_data.raw(); |
309 } | 295 } |
310 return ic_data.GetTargetAt(0); | 296 return ic_data.GetTargetAt(0); |
311 } | 297 } |
312 | 298 |
(...skipping 41 matching lines...) |
354 NativeFunction* target) { | 340 NativeFunction* target) { |
355 ASSERT(code.ContainsInstructionAt(return_address)); | 341 ASSERT(code.ContainsInstructionAt(return_address)); |
356 NativeCall call(return_address, code); | 342 NativeCall call(return_address, code); |
357 *target = call.native_function(); | 343 *target = call.native_function(); |
358 return call.target(); | 344 return call.target(); |
359 } | 345 } |
360 | 346 |
361 } // namespace dart | 347 } // namespace dart |
362 | 348 |
363 #endif // defined TARGET_ARCH_X64 | 349 #endif // defined TARGET_ARCH_X64 |