Chromium Code Reviews

Diff: runtime/vm/code_patcher_ia32.cc

Issue 1343373003: Revert "VM: New calling convention for generated code." (Closed) Base URL: git@github.com:dart-lang/sdk.git@master
Patch Set: Created 5 years, 3 months ago
--- runtime/vm/code_patcher_ia32.cc (before revert)
+++ runtime/vm/code_patcher_ia32.cc (after revert)
@@ -1,62 +1,76 @@
 // Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
 #include "vm/globals.h"  // Needed here to get TARGET_ARCH_IA32.
 #if defined(TARGET_ARCH_IA32)
 
 #include "vm/assembler.h"
 #include "vm/code_patcher.h"
 #include "vm/cpu.h"
 #include "vm/dart_entry.h"
 #include "vm/flow_graph_compiler.h"
 #include "vm/instructions.h"
 #include "vm/object.h"
 #include "vm/raw_object.h"
 
 namespace dart {
 
 // The expected pattern of a Dart unoptimized call (static and instance):
 // mov ECX, ic-data
-// mov EDI, target-code-object
 // call target_address (stub)
 // <- return address
 class UnoptimizedCall : public ValueObject {
  public:
   explicit UnoptimizedCall(uword return_address)
-      : start_(return_address - kPatternSize) {
-    ASSERT(IsValid());
+      : start_(return_address - (kNumInstructions * kInstructionSize)) {
+    ASSERT(IsValid(return_address));
+    ASSERT(kInstructionSize == Assembler::kCallExternalLabelSize);
+  }
+
+  static bool IsValid(uword return_address) {
+    uint8_t* code_bytes =
+        reinterpret_cast<uint8_t*>(
+            return_address - (kNumInstructions * kInstructionSize));
+    return (code_bytes[0] == 0xB9) &&
+           (code_bytes[1 * kInstructionSize] == 0xE8);
+  }
+
+  uword target() const {
+    const uword offset = *reinterpret_cast<uword*>(call_address() + 1);
+    return return_address() + offset;
+  }
+
+  void set_target(uword target) const {
+    uword* target_addr = reinterpret_cast<uword*>(call_address() + 1);
+    uword offset = target - return_address();
+    WritableInstructionsScope writable(reinterpret_cast<uword>(target_addr),
+                                       sizeof(offset));
+    *target_addr = offset;
+    CPU::FlushICache(call_address(), kInstructionSize);
   }
 
   RawObject* ic_data() const {
     return *reinterpret_cast<RawObject**>(start_ + 1);
   }
 
-  static const int kMovInstructionSize = 5;
-  static const int kCallInstructionSize = 3;
-  static const int kPatternSize =
-      2 * kMovInstructionSize + kCallInstructionSize;
+  static const int kNumInstructions = 2;
+  static const int kInstructionSize = 5;  // All instructions have same length.
 
  private:
-  bool IsValid() {
-    uint8_t* code_bytes = reinterpret_cast<uint8_t*>(start_);
-    return (code_bytes[0] == 0xB9) &&
-           (code_bytes[2 * kMovInstructionSize] == 0xFF);
-  }
-
   uword return_address() const {
-    return start_ + kPatternSize;
+    return start_ + kNumInstructions * kInstructionSize;
   }
 
   uword call_address() const {
-    return start_ + 2 * kMovInstructionSize;
+    return start_ + 1 * kInstructionSize;
   }
 
  protected:
   uword start_;
 
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedCall);
 };
 
 
(...skipping 43 matching lines...)
@@ -106,137 +120,151 @@
 #endif  // DEBUG
   }
 
  private:
   DISALLOW_IMPLICIT_CONSTRUCTORS(UnoptimizedStaticCall);
 };
 
 
 // The expected pattern of a dart static call:
 // mov EDX, arguments_descriptor_array (optional in polymorphic calls)
-// mov EDI, Immediate(code_object)
-// call [EDI + entry_point_offset]
+// call target_address
 // <- return address
 class StaticCall : public ValueObject {
  public:
   explicit StaticCall(uword return_address)
-      : start_(return_address - (kMovInstructionSize + kCallInstructionSize)) {
-    ASSERT(IsValid());
+      : start_(return_address - (kNumInstructions * kInstructionSize)) {
+    ASSERT(IsValid(return_address));
+    ASSERT(kInstructionSize == Assembler::kCallExternalLabelSize);
   }
 
-  bool IsValid() {
-    uint8_t* code_bytes = reinterpret_cast<uint8_t*>(start_);
-    return (code_bytes[0] == 0xBF) && (code_bytes[5] == 0xFF);
+  static bool IsValid(uword return_address) {
+    uint8_t* code_bytes =
+        reinterpret_cast<uint8_t*>(
+            return_address - (kNumInstructions * kInstructionSize));
+    return (code_bytes[0] == 0xE8);
   }
 
-  RawCode* target() const {
-    const uword imm = *reinterpret_cast<uword*>(start_ + 1);
-    return reinterpret_cast<RawCode*>(imm);
+  uword target() const {
+    const uword offset = *reinterpret_cast<uword*>(call_address() + 1);
+    return return_address() + offset;
   }
 
-  void set_target(const Code& target) const {
-    uword* target_addr = reinterpret_cast<uword*>(start_ + 1);
-    uword imm = reinterpret_cast<uword>(target.raw());
-    *target_addr = imm;
-    CPU::FlushICache(start_ + 1, sizeof(imm));
+  void set_target(uword target) const {
+    uword* target_addr = reinterpret_cast<uword*>(call_address() + 1);
+    uword offset = target - return_address();
+    *target_addr = offset;
+    CPU::FlushICache(call_address(), kInstructionSize);
   }
 
-  static const int kMovInstructionSize = 5;
-  static const int kCallInstructionSize = 3;
+  static const int kNumInstructions = 1;
+  static const int kInstructionSize = 5;  // All instructions have same length.
 
  private:
   uword return_address() const {
-    return start_ + kMovInstructionSize + kCallInstructionSize;
+    return start_ + kNumInstructions * kInstructionSize;
   }
 
   uword call_address() const {
-    return start_ + kMovInstructionSize;
+    return start_;
   }
 
   uword start_;
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(StaticCall);
 };
 
 
-RawCode* CodePatcher::GetStaticCallTargetAt(uword return_address,
+uword CodePatcher::GetStaticCallTargetAt(uword return_address,
                                          const Code& code) {
   ASSERT(code.ContainsInstructionAt(return_address));
   StaticCall call(return_address);
   return call.target();
 }
 
 
 void CodePatcher::PatchStaticCallAt(uword return_address,
                                     const Code& code,
-                                    const Code& new_target) {
-  const Instructions& instrs = Instructions::Handle(code.instructions());
-  WritableInstructionsScope writable(instrs.EntryPoint(), instrs.size());
+                                    uword new_target) {
   ASSERT(code.ContainsInstructionAt(return_address));
   StaticCall call(return_address);
   call.set_target(new_target);
 }
 
 
-void CodePatcher::InsertDeoptimizationCallAt(uword start, uword target) {
+void CodePatcher::PatchInstanceCallAt(uword return_address,
+                                      const Code& code,
+                                      uword new_target) {
+  ASSERT(code.ContainsInstructionAt(return_address));
+  InstanceCall call(return_address);
+  call.set_target(new_target);
+}
+
+
+void CodePatcher::InsertCallAt(uword start, uword target) {
   // The inserted call should not overlap the lazy deopt jump code.
   ASSERT(start + CallPattern::pattern_length_in_bytes() <= target);
   *reinterpret_cast<uint8_t*>(start) = 0xE8;
   CallPattern call(start);
   call.SetTargetAddress(target);
   CPU::FlushICache(start, CallPattern::pattern_length_in_bytes());
 }
 
 
-RawCode* CodePatcher::GetInstanceCallAt(
+uword CodePatcher::GetInstanceCallAt(
     uword return_address, const Code& code, ICData* ic_data) {
   ASSERT(code.ContainsInstructionAt(return_address));
   InstanceCall call(return_address);
   if (ic_data != NULL) {
     *ic_data ^= call.ic_data();
   }
-  return Code::null();
+  return call.target();
 }
 
 
 RawFunction* CodePatcher::GetUnoptimizedStaticCallAt(
     uword return_address, const Code& code, ICData* ic_data_result) {
   ASSERT(code.ContainsInstructionAt(return_address));
   UnoptimizedStaticCall static_call(return_address);
   ICData& ic_data = ICData::Handle();
   ic_data ^= static_call.ic_data();
   if (ic_data_result != NULL) {
     *ic_data_result = ic_data.raw();
   }
   return ic_data.GetTargetAt(0);
 }
 
 
 void CodePatcher::PatchNativeCallAt(uword return_address,
                                     const Code& code,
                                     NativeFunction target,
                                     const Code& trampoline) {
-  UNREACHABLE();
+  ASSERT(code.ContainsInstructionAt(return_address));
+  NativeCall call(return_address);
+  call.set_target(trampoline.EntryPoint());
+  call.set_native_function(target);
 }
 
 
-RawCode* CodePatcher::GetNativeCallAt(uword return_address,
+uword CodePatcher::GetNativeCallAt(uword return_address,
                                    const Code& code,
                                    NativeFunction* target) {
-  UNREACHABLE();
-  return NULL;
+  ASSERT(code.ContainsInstructionAt(return_address));
+  NativeCall call(return_address);
+  *target = call.native_function();
+  return call.target();
 }
 
 
 
 intptr_t CodePatcher::InstanceCallSizeInBytes() {
-  return InstanceCall::kPatternSize;
+  return InstanceCall::kNumInstructions * InstanceCall::kInstructionSize;
 }
 
 
 // The expected code pattern of an edge counter in unoptimized code:
 // b8 imm32  mov EAX, immediate
 class EdgeCounter : public ValueObject {
  public:
   EdgeCounter(uword pc, const Code& ignored)
       : end_(pc - FlowGraphCompiler::EdgeCounterIncrementSizeInBytes()) {
     ASSERT(IsValid(end_));
(...skipping 14 matching lines...)
 
 RawObject* CodePatcher::GetEdgeCounterAt(uword pc, const Code& code) {
   ASSERT(code.ContainsInstructionAt(pc));
   EdgeCounter counter(pc, code);
   return counter.edge_counter();
 }
 
 }  // namespace dart
 
 #endif  // defined TARGET_ARCH_IA32
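Context for the patterns above: after the revert, both UnoptimizedCall and StaticCall treat the patched call site as an IA-32 direct near call, opcode 0xE8 followed by a signed 32-bit displacement measured from the return address (the byte immediately after the 5-byte instruction). That is why target() returns return_address() + offset and set_target() stores target - return_address(). For example, a call whose opcode byte sits at 0x1000 and whose stored displacement is 0xFFB resolves to 0x1005 + 0xFFB = 0x2000. The sketch below illustrates only that arithmetic in isolation; it is not VM code, the helper names ReadCallTarget/WriteCallTarget are hypothetical, and it deliberately omits the writable-page and instruction-cache handling (WritableInstructionsScope, CPU::FlushICache) that the real patcher performs.

// Standalone sketch (not VM code) of the rel32 call arithmetic used above.
// ReadCallTarget/WriteCallTarget are illustrative names, not VM APIs.
#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstring>

static uintptr_t ReadCallTarget(const uint8_t* call_address) {
  assert(call_address[0] == 0xE8);  // direct near call, 5 bytes total
  int32_t offset;
  std::memcpy(&offset, call_address + 1, sizeof(offset));
  const uintptr_t return_address =
      reinterpret_cast<uintptr_t>(call_address) + 5;
  return return_address + offset;  // target = return address + rel32
}

static void WriteCallTarget(uint8_t* call_address, uintptr_t target) {
  const uintptr_t return_address =
      reinterpret_cast<uintptr_t>(call_address) + 5;
  const int32_t offset = static_cast<int32_t>(target - return_address);
  call_address[0] = 0xE8;
  std::memcpy(call_address + 1, &offset, sizeof(offset));
  // The real patcher additionally maps the page writable and flushes
  // the instruction cache before the patched call can be executed.
}

int main() {
  uint8_t buffer[16] = {0};  // stands in for a patchable call site
  const uintptr_t target = reinterpret_cast<uintptr_t>(buffer) + 0x40;
  WriteCallTarget(buffer, target);
  assert(ReadCallTarget(buffer) == target);
  std::printf("rel32 round-trips correctly\n");
  return 0;
}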