Chromium Code Reviews

Unified Diff: runtime/vm/code_patcher_ia32.cc

Issue 1192103004: VM: New calling convention for generated code. (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: fixed comments (created 5 years, 3 months ago)
 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.

 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_IA32.
 #if defined(TARGET_ARCH_IA32)

 #include "vm/assembler.h"
 #include "vm/code_patcher.h"
 #include "vm/cpu.h"
 #include "vm/dart_entry.h"
 #include "vm/flow_graph_compiler.h"
 #include "vm/instructions.h"
 #include "vm/object.h"
 #include "vm/raw_object.h"

 namespace dart {

 // The expected pattern of a Dart unoptimized call (static and instance):
 //  mov ECX, ic-data
+//  mov EDI, target-code-object
 //  call target_address (stub)
 //  <- return address
 class UnoptimizedCall : public ValueObject {
  public:
   explicit UnoptimizedCall(uword return_address)
-      : start_(return_address - (kNumInstructions * kInstructionSize)) {
-    ASSERT(IsValid(return_address));
-    ASSERT(kInstructionSize == Assembler::kCallExternalLabelSize);
-  }
-
-  static bool IsValid(uword return_address) {
-    uint8_t* code_bytes =
-        reinterpret_cast<uint8_t*>(
-            return_address - (kNumInstructions * kInstructionSize));
-    return (code_bytes[0] == 0xB9) &&
-           (code_bytes[1 * kInstructionSize] == 0xE8);
-  }
-
-  uword target() const {
-    const uword offset = *reinterpret_cast<uword*>(call_address() + 1);
-    return return_address() + offset;
-  }
-
-  void set_target(uword target) const {
-    uword* target_addr = reinterpret_cast<uword*>(call_address() + 1);
-    uword offset = target - return_address();
-    WritableInstructionsScope writable(reinterpret_cast<uword>(target_addr),
-                                       sizeof(offset));
-    *target_addr = offset;
-    CPU::FlushICache(call_address(), kInstructionSize);
+      : start_(return_address - kPatternSize) {
+    ASSERT(IsValid());
   }

   RawObject* ic_data() const {
     return *reinterpret_cast<RawObject**>(start_ + 1);
   }

-  static const int kNumInstructions = 2;
-  static const int kInstructionSize = 5;  // All instructions have same length.
+  static const int kMovInstructionSize = 5;
+  static const int kCallInstructionSize = 3;
+  static const int kPatternSize =
+      2 * kMovInstructionSize + kCallInstructionSize;

  private:
+  bool IsValid() {
+    uint8_t* code_bytes = reinterpret_cast<uint8_t*>(start_);
+    return (code_bytes[0] == 0xB9) &&
+           (code_bytes[2 * kMovInstructionSize] == 0xFF);
+  }
+
   uword return_address() const {
-    return start_ + kNumInstructions * kInstructionSize;
+    return start_ + kPatternSize;
   }

   uword call_address() const {
-    return start_ + 1 * kInstructionSize;
+    return start_ + 2 * kMovInstructionSize;
   }

  protected:
   uword start_;

  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedCall);
 };

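Note: with this change the unoptimized call site grows from two 5-byte instructions to a 13-byte pattern: mov ECX, imm32 (0xB9), mov EDI, imm32 (0xBF), and a 3-byte indirect call through EDI (0xFF). The following is a minimal standalone sketch (plain C++, not VM code) of how such a pattern can be recognized and how the ic-data immediate is read back; the example bytes and the entry-point displacement 0x0F are made up for illustration.

// Standalone sketch: recognizing the 13-byte unoptimized-call pattern and
// extracting the 32-bit ic-data immediate. A plain byte buffer stands in for
// instruction memory; the offsets mirror kPatternSize above.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstring>

namespace {

constexpr int kMovInstructionSize = 5;   // B9/BF imm32
constexpr int kCallInstructionSize = 3;  // FF /2 with an 8-bit displacement
constexpr int kPatternSize = 2 * kMovInstructionSize + kCallInstructionSize;

bool LooksLikeUnoptimizedCall(const uint8_t* start) {
  // mov ECX, imm32 starts with 0xB9; the indirect call starts with 0xFF.
  return start[0] == 0xB9 && start[2 * kMovInstructionSize] == 0xFF;
}

uint32_t IcDataImmediate(const uint8_t* start) {
  uint32_t imm;
  std::memcpy(&imm, start + 1, sizeof(imm));  // little-endian imm32 after B9
  return imm;
}

}  // namespace

int main() {
  // Hypothetical call site: mov ECX, 0x11223344; mov EDI, 0x55667788;
  // call [EDI + 0x0f]. The return address would follow the 13th byte.
  uint8_t code[kPatternSize] = {
      0xB9, 0x44, 0x33, 0x22, 0x11,
      0xBF, 0x88, 0x77, 0x66, 0x55,
      0xFF, 0x57, 0x0F};
  assert(LooksLikeUnoptimizedCall(code));
  std::printf("ic-data imm32: 0x%08x\n", IcDataImmediate(code));
  return 0;
}
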
 (...skipping 43 matching lines...)
 #endif  // DEBUG
   }

  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedStaticCall);
 };


 // The expected pattern of a dart static call:
 //  mov EDX, arguments_descriptor_array (optional in polymorphic calls)
-//  call target_address
+//  mov EDI, Immediate(code_object)
+//  call [EDI + entry_point_offset]
 //  <- return address
 class StaticCall : public ValueObject {
  public:
   explicit StaticCall(uword return_address)
-      : start_(return_address - (kNumInstructions * kInstructionSize)) {
-    ASSERT(IsValid(return_address));
-    ASSERT(kInstructionSize == Assembler::kCallExternalLabelSize);
+      : start_(return_address - (kMovInstructionSize + kCallInstructionSize)) {
+    ASSERT(IsValid());
   }

-  static bool IsValid(uword return_address) {
-    uint8_t* code_bytes =
-        reinterpret_cast<uint8_t*>(
-            return_address - (kNumInstructions * kInstructionSize));
-    return (code_bytes[0] == 0xE8);
+  bool IsValid() {
+    uint8_t* code_bytes = reinterpret_cast<uint8_t*>(start_);
+    return (code_bytes[0] == 0xBF) && (code_bytes[5] == 0xFF);
   }

-  uword target() const {
-    const uword offset = *reinterpret_cast<uword*>(call_address() + 1);
-    return return_address() + offset;
+  RawCode* target() const {
+    const uword imm = *reinterpret_cast<uword*>(start_ + 1);
+    return reinterpret_cast<RawCode*>(imm);
   }

-  void set_target(uword target) const {
-    uword* target_addr = reinterpret_cast<uword*>(call_address() + 1);
-    uword offset = target - return_address();
-    *target_addr = offset;
-    CPU::FlushICache(call_address(), kInstructionSize);
+  void set_target(const Code& target) const {
+    uword* target_addr = reinterpret_cast<uword*>(start_ + 1);
+    uword imm = reinterpret_cast<uword>(target.raw());
+    *target_addr = imm;
+    CPU::FlushICache(start_ + 1, sizeof(imm));
   }

-  static const int kNumInstructions = 1;
-  static const int kInstructionSize = 5;  // All instructions have same length.
+  static const int kMovInstructionSize = 5;
+  static const int kCallInstructionSize = 3;

  private:
   uword return_address() const {
-    return start_ + kNumInstructions * kInstructionSize;
+    return start_ + kMovInstructionSize + kCallInstructionSize;
   }

   uword call_address() const {
-    return start_;
+    return start_ + kMovInstructionSize;
   }

   uword start_;

   DISALLOW_IMPLICIT_CONSTRUCTORS(StaticCall);
 };

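Note: the optimized static call is now redirected by rewriting only the 4-byte immediate of mov EDI, code_object; the indirect call through EDI then picks up the new target's entry point. Below is a standalone sketch of that idea using a plain byte buffer and a hypothetical immediate value; there is no real Code object, writable-scope handling, or icache flush here.

// Standalone sketch: patch a static call by rewriting the imm32 operand of
// "mov EDI, code_object". The trailing "call [EDI + entry_point_offset]" is
// left untouched.
#include <cassert>
#include <cstdint>
#include <cstring>

constexpr int kMovInstructionSize = 5;

void SetStaticCallTarget(uint8_t* start, uint32_t new_code_object) {
  // Same shape check as StaticCall::IsValid(): BF imm32 followed by FF.
  assert(start[0] == 0xBF && start[kMovInstructionSize] == 0xFF);
  std::memcpy(start + 1, &new_code_object, sizeof(new_code_object));
  // The real patcher also flushes the instruction cache for these bytes.
}

int main() {
  uint8_t call_site[] = {0xBF, 0x00, 0x00, 0x00, 0x00,   // mov EDI, imm32
                         0xFF, 0x57, 0x0F};              // call [EDI + 0x0f]
  SetStaticCallTarget(call_site, 0xCAFE0000);            // hypothetical value
  uint32_t imm;
  std::memcpy(&imm, call_site + 1, sizeof(imm));
  assert(imm == 0xCAFE0000);
  return 0;
}
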
-uword CodePatcher::GetStaticCallTargetAt(uword return_address,
-                                         const Code& code) {
+RawCode* CodePatcher::GetStaticCallTargetAt(uword return_address,
+                                            const Code& code) {
   ASSERT(code.ContainsInstructionAt(return_address));
   StaticCall call(return_address);
   return call.target();
 }


 void CodePatcher::PatchStaticCallAt(uword return_address,
                                     const Code& code,
-                                    uword new_target) {
+                                    const Code& new_target) {
+  const Instructions& instrs = Instructions::Handle(code.instructions());
+  WritableInstructionsScope writable(instrs.EntryPoint(), instrs.size());
   ASSERT(code.ContainsInstructionAt(return_address));
   StaticCall call(return_address);
   call.set_target(new_target);
 }

-void CodePatcher::PatchInstanceCallAt(uword return_address,
-                                      const Code& code,
-                                      uword new_target) {
-  ASSERT(code.ContainsInstructionAt(return_address));
-  InstanceCall call(return_address);
-  call.set_target(new_target);
-}
-
-
-void CodePatcher::InsertCallAt(uword start, uword target) {
+void CodePatcher::InsertDeoptimizationCallAt(uword start, uword target) {
   // The inserted call should not overlap the lazy deopt jump code.
   ASSERT(start + CallPattern::pattern_length_in_bytes() <= target);
   *reinterpret_cast<uint8_t*>(start) = 0xE8;
   CallPattern call(start);
   call.SetTargetAddress(target);
   CPU::FlushICache(start, CallPattern::pattern_length_in_bytes());
 }

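Note: InsertDeoptimizationCallAt still emits a direct near call (opcode 0xE8). As a reminder of the encoding CallPattern::SetTargetAddress has to produce, the 32-bit operand is a displacement relative to the end of the 5-byte call instruction. A small standalone sketch (the buffer and addresses are hypothetical):

// Standalone sketch of the E8 rel32 near-call encoding: rel32 is measured
// from the address of the *next* instruction (call site + 5 bytes).
#include <cassert>
#include <cstdint>
#include <cstring>

void WriteNearCall(uint8_t* call_site, uint32_t call_site_addr,
                   uint32_t target_addr) {
  call_site[0] = 0xE8;
  uint32_t rel32 = target_addr - (call_site_addr + 5);
  std::memcpy(call_site + 1, &rel32, sizeof(rel32));
}

int main() {
  uint8_t buf[5];
  // Hypothetical addresses: a call placed at 0x1000 targeting 0x2000.
  WriteNearCall(buf, 0x1000, 0x2000);
  uint32_t rel32;
  std::memcpy(&rel32, buf + 1, sizeof(rel32));
  assert(rel32 == 0x2000 - 0x1005);
  return 0;
}
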
-uword CodePatcher::GetInstanceCallAt(
+RawCode* CodePatcher::GetInstanceCallAt(
     uword return_address, const Code& code, ICData* ic_data) {
   ASSERT(code.ContainsInstructionAt(return_address));
   InstanceCall call(return_address);
   if (ic_data != NULL) {
     *ic_data ^= call.ic_data();
   }
-  return call.target();
+  return Code::null();
 }


 RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(
     uword return_address, const Code& code, ICData* ic_data_result) {
   ASSERT(code.ContainsInstructionAt(return_address));
   UnoptimizedStaticCall static_call(return_address);
   ICData& ic_data = ICData::Handle();
   ic_data ^= static_call.ic_data();
   if (ic_data_result != NULL) {
     *ic_data_result = ic_data.raw();
   }
   return ic_data.GetTargetAt(0);
 }


 void CodePatcher::PatchNativeCallAt(uword return_address,
                                     const Code& code,
                                     NativeFunction target,
                                     const Code& trampoline) {
-  ASSERT(code.ContainsInstructionAt(return_address));
-  NativeCall call(return_address);
-  call.set_target(trampoline.EntryPoint());
-  call.set_native_function(target);
+  UNREACHABLE();
 }


-uword CodePatcher::GetNativeCallAt(uword return_address,
-                                   const Code& code,
-                                   NativeFunction* target) {
-  ASSERT(code.ContainsInstructionAt(return_address));
-  NativeCall call(return_address);
-  *target = call.native_function();
-  return call.target();
+RawCode* CodePatcher::GetNativeCallAt(uword return_address,
+                                      const Code& code,
+                                      NativeFunction* target) {
+  UNREACHABLE();
+  return NULL;
 }



 intptr_t CodePatcher::InstanceCallSizeInBytes() {
-  return InstanceCall::kNumInstructions * InstanceCall::kInstructionSize;
+  return InstanceCall::kPatternSize;
 }

 // The expected code pattern of an edge counter in unoptimized code:
 //   b8 imm32  mov EAX, immediate
 class EdgeCounter : public ValueObject {
  public:
   EdgeCounter(uword pc, const Code& ignored)
       : end_(pc - FlowGraphCompiler::EdgeCounterIncrementSizeInBytes()) {
     ASSERT(IsValid(end_));
 (...skipping 14 matching lines...)

 RawObject* CodePatcher::GetEdgeCounterAt(uword pc, const Code& code) {
   ASSERT(code.ContainsInstructionAt(pc));
   EdgeCounter counter(pc, code);
   return counter.edge_counter();
 }
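Note: the edge counter site is a single mov EAX, imm32 (opcode 0xB8) whose immediate is the counter object, as described in the comment above. A standalone sketch of validating that pattern and reading the immediate back out; the value here is an arbitrary stand-in, not a real RawObject pointer.

// Standalone sketch: 'end' points just past the 5-byte "mov EAX, imm32".
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstring>

bool IsEdgeCounter(const uint8_t* end) { return end[-5] == 0xB8; }

uint32_t EdgeCounterImmediate(const uint8_t* end) {
  uint32_t imm;
  std::memcpy(&imm, end - 4, sizeof(imm));  // little-endian imm32 after B8
  return imm;
}

int main() {
  uint8_t code[] = {0xB8, 0x78, 0x56, 0x34, 0x12};  // mov EAX, 0x12345678
  const uint8_t* end = code + sizeof(code);
  assert(IsEdgeCounter(end));
  std::printf("edge counter imm32: 0x%08x\n", EdgeCounterImmediate(end));
  return 0;
}
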

 }  // namespace dart

 #endif  // defined TARGET_ARCH_IA32