| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 192 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 203 if (FLAG_trace_deopt) { | 203 if (FLAG_trace_deopt) { |
| 204 PrintF("[forced deoptimization: "); | 204 PrintF("[forced deoptimization: "); |
| 205 function->PrintName(); | 205 function->PrintName(); |
| 206 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); | 206 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); |
| 207 } | 207 } |
| 208 } | 208 } |
| 209 | 209 |
| 210 | 210 |
| 211 static const byte kJnsInstruction = 0x79; | 211 static const byte kJnsInstruction = 0x79; |
| 212 static const byte kJnsOffset = 0x13; | 212 static const byte kJnsOffset = 0x13; |
| 213 static const byte kJaeInstruction = 0x73; | |
| 214 static const byte kJaeOffset = 0x07; | |
| 215 static const byte kCallInstruction = 0xe8; | 213 static const byte kCallInstruction = 0xe8; |
| 216 static const byte kNopByteOne = 0x66; | 214 static const byte kNopByteOne = 0x66; |
| 217 static const byte kNopByteTwo = 0x90; | 215 static const byte kNopByteTwo = 0x90; |
| 218 | 216 |
| 219 | 217 |
| 220 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, | 218 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, |
| 221 Address pc_after, | 219 Address pc_after, |
| 222 Code* check_code, | 220 Code* check_code, |
| 223 Code* replacement_code) { | 221 Code* replacement_code) { |
| 224 Address call_target_address = pc_after - kIntSize; | 222 Address call_target_address = pc_after - kIntSize; |
| 225 ASSERT_EQ(check_code->entry(), | 223 ASSERT_EQ(check_code->entry(), |
| 226 Assembler::target_address_at(call_target_address)); | 224 Assembler::target_address_at(call_target_address)); |
| 227 // The stack check code matches the pattern: | 225 // The back edge bookkeeping code matches the pattern: |
| 228 // | 226 // |
| 229 // cmp esp, <limit> | 227 // sub <profiling_counter>, <delta> |
| 230 // jae ok | 228 // jns ok |
| 231 // call <stack guard> | 229 // call <stack guard> |
| 232 // test eax, <loop nesting depth> | 230 // test eax, <loop nesting depth> |
| 233 // ok: ... | 231 // ok: ... |
| 234 // | 232 // |
| 235 // We will patch away the branch so the code is: | 233 // We will patch away the branch so the code is: |
| 236 // | 234 // |
| 237 // cmp esp, <limit> ;; Not changed | 235 // sub <profiling_counter>, <delta> ;; Not changed |
| 238 // nop | 236 // nop |
| 239 // nop | 237 // nop |
| 240 // call <on-stack replacement> | 238 // call <on-stack replacement> |
| 241 // test eax, <loop nesting depth> | 239 // test eax, <loop nesting depth> |
| 242 // ok: | 240 // ok: |
| 243 | 241 |
| 244 if (FLAG_count_based_interrupts) { | 242 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); |
| 245 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); | 243 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); |
| 246 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); | 244 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
| 247 } else { | |
| 248 ASSERT_EQ(kJaeInstruction, *(call_target_address - 3)); | |
| 249 ASSERT_EQ(kJaeOffset, *(call_target_address - 2)); | |
| 250 } | |
| 251 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); | |
| 252 *(call_target_address - 3) = kNopByteOne; | 245 *(call_target_address - 3) = kNopByteOne; |
| 253 *(call_target_address - 2) = kNopByteTwo; | 246 *(call_target_address - 2) = kNopByteTwo; |
| 254 Assembler::set_target_address_at(call_target_address, | 247 Assembler::set_target_address_at(call_target_address, |
| 255 replacement_code->entry()); | 248 replacement_code->entry()); |
| 256 | 249 |
| 257 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 250 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 258 unoptimized_code, call_target_address, replacement_code); | 251 unoptimized_code, call_target_address, replacement_code); |
| 259 } | 252 } |
| 260 | 253 |
| 261 | 254 |
| 262 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, | 255 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, |
| 263 Address pc_after, | 256 Address pc_after, |
| 264 Code* check_code, | 257 Code* check_code, |
| 265 Code* replacement_code) { | 258 Code* replacement_code) { |
| 266 Address call_target_address = pc_after - kIntSize; | 259 Address call_target_address = pc_after - kIntSize; |
| 267 ASSERT_EQ(replacement_code->entry(), | 260 ASSERT_EQ(replacement_code->entry(), |
| 268 Assembler::target_address_at(call_target_address)); | 261 Assembler::target_address_at(call_target_address)); |
| 269 | 262 |
| 270 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to | 263 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to |
| 271 // restore the conditional branch. | 264 // restore the conditional branch. |
| 272 ASSERT_EQ(kNopByteOne, *(call_target_address - 3)); | 265 ASSERT_EQ(kNopByteOne, *(call_target_address - 3)); |
| 273 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); | 266 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); |
| 274 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); | 267 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
| 275 if (FLAG_count_based_interrupts) { | 268 *(call_target_address - 3) = kJnsInstruction; |
| 276 *(call_target_address - 3) = kJnsInstruction; | 269 *(call_target_address - 2) = kJnsOffset; |
| 277 *(call_target_address - 2) = kJnsOffset; | |
| 278 } else { | |
| 279 *(call_target_address - 3) = kJaeInstruction; | |
| 280 *(call_target_address - 2) = kJaeOffset; | |
| 281 } | |
| 282 Assembler::set_target_address_at(call_target_address, | 270 Assembler::set_target_address_at(call_target_address, |
| 283 check_code->entry()); | 271 check_code->entry()); |
| 284 | 272 |
| 285 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 273 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 286 unoptimized_code, call_target_address, check_code); | 274 unoptimized_code, call_target_address, check_code); |
| 287 } | 275 } |
| 288 | 276 |
| 289 | 277 |
| 290 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { | 278 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { |
| 291 ByteArray* translations = data->TranslationByteArray(); | 279 ByteArray* translations = data->TranslationByteArray(); |
| (...skipping 990 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1282 } | 1270 } |
| 1283 __ bind(&done); | 1271 __ bind(&done); |
| 1284 } | 1272 } |
| 1285 | 1273 |
| 1286 #undef __ | 1274 #undef __ |
| 1287 | 1275 |
| 1288 | 1276 |
| 1289 } } // namespace v8::internal | 1277 } } // namespace v8::internal |
| 1290 | 1278 |
| 1291 #endif // V8_TARGET_ARCH_IA32 | 1279 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |