OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 192 matching lines...) Loading... | |
203 if (FLAG_trace_deopt) { | 203 if (FLAG_trace_deopt) { |
204 PrintF("[forced deoptimization: "); | 204 PrintF("[forced deoptimization: "); |
205 function->PrintName(); | 205 function->PrintName(); |
206 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); | 206 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); |
207 } | 207 } |
208 } | 208 } |
209 | 209 |
210 | 210 |
211 static const byte kJnsInstruction = 0x79; | 211 static const byte kJnsInstruction = 0x79; |
212 static const byte kJnsOffset = 0x13; | 212 static const byte kJnsOffset = 0x13; |
213 static const byte kJaeInstruction = 0x73; | 213 static const byte kJaeInstruction = 0x73; |
Jakob Kummerow
2012/12/06 16:25:41
not needed anymore, please remove.
Sven Panne
2012/12/07 08:54:56
Done.
| |
214 static const byte kJaeOffset = 0x07; | 214 static const byte kJaeOffset = 0x07; |
Jakob Kummerow
2012/12/06 16:25:41
not needed anymore, please remove.
Sven Panne
2012/12/07 08:54:56
Done.
| |
215 static const byte kCallInstruction = 0xe8; | 215 static const byte kCallInstruction = 0xe8; |
216 static const byte kNopByteOne = 0x66; | 216 static const byte kNopByteOne = 0x66; |
217 static const byte kNopByteTwo = 0x90; | 217 static const byte kNopByteTwo = 0x90; |
218 | 218 |
219 | 219 |
220 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, | 220 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, |
221 Address pc_after, | 221 Address pc_after, |
222 Code* check_code, | 222 Code* check_code, |
223 Code* replacement_code) { | 223 Code* replacement_code) { |
224 Address call_target_address = pc_after - kIntSize; | 224 Address call_target_address = pc_after - kIntSize; |
225 ASSERT_EQ(check_code->entry(), | 225 ASSERT_EQ(check_code->entry(), |
226 Assembler::target_address_at(call_target_address)); | 226 Assembler::target_address_at(call_target_address)); |
227 // The stack check code matches the pattern: | 227 // The stack check code matches the pattern: |
228 // | 228 // |
229 // cmp esp, <limit> | 229 // cmp esp, <limit> |
Jakob Kummerow
2012/12/06 16:25:41
Please update this comment. Feel free to use pseud
Sven Panne
2012/12/07 08:54:56
Done.
| |
230 // jae ok | 230 // jae ok |
231 // call <stack guard> | 231 // call <stack guard> |
232 // test eax, <loop nesting depth> | 232 // test eax, <loop nesting depth> |
233 // ok: ... | 233 // ok: ... |
234 // | 234 // |
235 // We will patch away the branch so the code is: | 235 // We will patch away the branch so the code is: |
236 // | 236 // |
237 // cmp esp, <limit> ;; Not changed | 237 // cmp esp, <limit> ;; Not changed |
238 // nop | 238 // nop |
239 // nop | 239 // nop |
240 // call <on-stack replacement> | 240 // call <on-stack replacement> |
241 // test eax, <loop nesting depth> | 241 // test eax, <loop nesting depth> |
242 // ok: | 242 // ok: |
243 | 243 |
244 if (FLAG_count_based_interrupts) { | 244 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); |
245 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); | 245 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); |
246 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); | 246 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
247 } else { | |
248 ASSERT_EQ(kJaeInstruction, *(call_target_address - 3)); | |
249 ASSERT_EQ(kJaeOffset, *(call_target_address - 2)); | |
250 } | |
251 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); | |
252 *(call_target_address - 3) = kNopByteOne; | 247 *(call_target_address - 3) = kNopByteOne; |
253 *(call_target_address - 2) = kNopByteTwo; | 248 *(call_target_address - 2) = kNopByteTwo; |
254 Assembler::set_target_address_at(call_target_address, | 249 Assembler::set_target_address_at(call_target_address, |
255 replacement_code->entry()); | 250 replacement_code->entry()); |
256 | 251 |
257 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 252 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
258 unoptimized_code, call_target_address, replacement_code); | 253 unoptimized_code, call_target_address, replacement_code); |
259 } | 254 } |
260 | 255 |
261 | 256 |
262 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, | 257 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, |
263 Address pc_after, | 258 Address pc_after, |
264 Code* check_code, | 259 Code* check_code, |
265 Code* replacement_code) { | 260 Code* replacement_code) { |
266 Address call_target_address = pc_after - kIntSize; | 261 Address call_target_address = pc_after - kIntSize; |
267 ASSERT_EQ(replacement_code->entry(), | 262 ASSERT_EQ(replacement_code->entry(), |
268 Assembler::target_address_at(call_target_address)); | 263 Assembler::target_address_at(call_target_address)); |
269 | 264 |
270 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to | 265 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to |
271 // restore the conditional branch. | 266 // restore the conditional branch. |
272 ASSERT_EQ(kNopByteOne, *(call_target_address - 3)); | 267 ASSERT_EQ(kNopByteOne, *(call_target_address - 3)); |
273 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); | 268 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); |
274 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); | 269 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
275 if (FLAG_count_based_interrupts) { | 270 *(call_target_address - 3) = kJnsInstruction; |
276 *(call_target_address - 3) = kJnsInstruction; | 271 *(call_target_address - 2) = kJnsOffset; |
277 *(call_target_address - 2) = kJnsOffset; | |
278 } else { | |
279 *(call_target_address - 3) = kJaeInstruction; | |
280 *(call_target_address - 2) = kJaeOffset; | |
281 } | |
282 Assembler::set_target_address_at(call_target_address, | 272 Assembler::set_target_address_at(call_target_address, |
283 check_code->entry()); | 273 check_code->entry()); |
284 | 274 |
285 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 275 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
286 unoptimized_code, call_target_address, check_code); | 276 unoptimized_code, call_target_address, check_code); |
287 } | 277 } |
288 | 278 |
289 | 279 |
290 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { | 280 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { |
291 ByteArray* translations = data->TranslationByteArray(); | 281 ByteArray* translations = data->TranslationByteArray(); |
(...skipping 990 matching lines...) Loading... | |
1282 } | 1272 } |
1283 __ bind(&done); | 1273 __ bind(&done); |
1284 } | 1274 } |
1285 | 1275 |
1286 #undef __ | 1276 #undef __ |
1287 | 1277 |
1288 | 1278 |
1289 } } // namespace v8::internal | 1279 } } // namespace v8::internal |
1290 | 1280 |
1291 #endif // V8_TARGET_ARCH_IA32 | 1281 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |