| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 73 RelocInfoWriter reloc_info_writer(reloc_end_address, code_start_address); | 73 RelocInfoWriter reloc_info_writer(reloc_end_address, code_start_address); |
| 74 | 74 |
| 75 // For each return after a safepoint insert a call to the corresponding | 75 // For each return after a safepoint insert a call to the corresponding |
| 76 // deoptimization entry. Since the call is a relative encoding, write new | 76 // deoptimization entry. Since the call is a relative encoding, write new |
| 77 // reloc info. We do not need any of the existing reloc info because the | 77 // reloc info. We do not need any of the existing reloc info because the |
| 78 // existing code will not be used again (we zap it in debug builds). | 78 // existing code will not be used again (we zap it in debug builds). |
| 79 SafepointTable table(code); | 79 SafepointTable table(code); |
| 80 Address prev_address = code_start_address; | 80 Address prev_address = code_start_address; |
| 81 for (unsigned i = 0; i < table.length(); ++i) { | 81 for (unsigned i = 0; i < table.length(); ++i) { |
| 82 Address curr_address = code_start_address + table.GetPcOffset(i); | 82 Address curr_address = code_start_address + table.GetPcOffset(i); |
| 83 ASSERT_GE(curr_address, prev_address); |
| 83 ZapCodeRange(prev_address, curr_address); | 84 ZapCodeRange(prev_address, curr_address); |
| 84 | 85 |
| 85 SafepointEntry safepoint_entry = table.GetEntry(i); | 86 SafepointEntry safepoint_entry = table.GetEntry(i); |
| 86 int deoptimization_index = safepoint_entry.deoptimization_index(); | 87 int deoptimization_index = safepoint_entry.deoptimization_index(); |
| 87 if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) { | 88 if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) { |
| 88 // The gap code is needed to get to the state expected at the bailout. | 89 // The gap code is needed to get to the state expected at the bailout. |
| 89 curr_address += safepoint_entry.gap_code_size(); | 90 curr_address += safepoint_entry.gap_code_size(); |
| 90 | 91 |
| 91 CodePatcher patcher(curr_address, patch_size()); | 92 CodePatcher patcher(curr_address, patch_size()); |
| 92 Address deopt_entry = GetDeoptimizationEntry(deoptimization_index, LAZY); | 93 Address deopt_entry = GetDeoptimizationEntry(deoptimization_index, LAZY); |
| 93 patcher.masm()->call(deopt_entry, RelocInfo::NONE); | 94 patcher.masm()->call(deopt_entry, RelocInfo::NONE); |
| 94 | 95 |
| 95 // We use RUNTIME_ENTRY for deoptimization bailouts. | 96 // We use RUNTIME_ENTRY for deoptimization bailouts. |
| 96 RelocInfo rinfo(curr_address + 1, // 1 after the call opcode. | 97 RelocInfo rinfo(curr_address + 1, // 1 after the call opcode. |
| 97 RelocInfo::RUNTIME_ENTRY, | 98 RelocInfo::RUNTIME_ENTRY, |
| 98 reinterpret_cast<intptr_t>(deopt_entry)); | 99 reinterpret_cast<intptr_t>(deopt_entry)); |
| 99 reloc_info_writer.Write(&rinfo); | 100 reloc_info_writer.Write(&rinfo); |
| 100 | 101 ASSERT_GE(reloc_info_writer.pos(), |
| 102 reloc_info->address() + ByteArray::kHeaderSize); |
| 101 curr_address += patch_size(); | 103 curr_address += patch_size(); |
| 102 } | 104 } |
| 103 prev_address = curr_address; | 105 prev_address = curr_address; |
| 104 } | 106 } |
| 105 ZapCodeRange(prev_address, | 107 ZapCodeRange(prev_address, |
| 106 code_start_address + code->safepoint_table_offset()); | 108 code_start_address + code->safepoint_table_offset()); |
| 107 | 109 |
| 108 // Move the relocation info to the beginning of the byte array. | 110 // Move the relocation info to the beginning of the byte array. |
| 109 int new_reloc_size = reloc_end_address - reloc_info_writer.pos(); | 111 int new_reloc_size = reloc_end_address - reloc_info_writer.pos(); |
| 110 memmove(code->relocation_start(), reloc_info_writer.pos(), new_reloc_size); | 112 memmove(code->relocation_start(), reloc_info_writer.pos(), new_reloc_size); |
| (...skipping 20 matching lines...) Expand all Loading... |
| 131 PrintF("[forced deoptimization: "); | 133 PrintF("[forced deoptimization: "); |
| 132 function->PrintName(); | 134 function->PrintName(); |
| 133 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); | 135 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); |
| 134 } | 136 } |
| 135 } | 137 } |
| 136 | 138 |
| 137 | 139 |
| 138 void Deoptimizer::PatchStackCheckCodeAt(Address pc_after, | 140 void Deoptimizer::PatchStackCheckCodeAt(Address pc_after, |
| 139 Code* check_code, | 141 Code* check_code, |
| 140 Code* replacement_code) { | 142 Code* replacement_code) { |
| 141 Address call_target_address = pc_after - kPointerSize; | 143 Address call_target_address = pc_after - kIntSize; |
| 142 ASSERT(check_code->entry() == | 144 ASSERT(check_code->entry() == |
| 143 Assembler::target_address_at(call_target_address)); | 145 Assembler::target_address_at(call_target_address)); |
| 144 // The stack check code matches the pattern: | 146 // The stack check code matches the pattern: |
| 145 // | 147 // |
| 146 // cmp esp, <limit> | 148 // cmp esp, <limit> |
| 147 // jae ok | 149 // jae ok |
| 148 // call <stack guard> | 150 // call <stack guard> |
| 149 // test eax, <loop nesting depth> | 151 // test eax, <loop nesting depth> |
| 150 // ok: ... | 152 // ok: ... |
| 151 // | 153 // |
| 152 // We will patch away the branch so the code is: | 154 // We will patch away the branch so the code is: |
| 153 // | 155 // |
| 154 // cmp esp, <limit> ;; Not changed | 156 // cmp esp, <limit> ;; Not changed |
| 155 // nop | 157 // nop |
| 156 // nop | 158 // nop |
| 157 //   call <on-stack replacement>   | 159 //   call <on-stack replacement>   |
| 158 // test eax, <loop nesting depth> | 160 // test eax, <loop nesting depth> |
| 159 // ok: | 161 // ok: |
| 160 ASSERT(*(call_target_address - 3) == 0x73 && // jae | 162 ASSERT(*(call_target_address - 3) == 0x73 && // jae |
| 161 *(call_target_address - 2) == 0x07 && // offset | 163 *(call_target_address - 2) == 0x07 && // offset |
| 162 *(call_target_address - 1) == 0xe8); // call | 164 *(call_target_address - 1) == 0xe8); // call |
| 163 *(call_target_address - 3) = 0x90; // nop | 165 *(call_target_address - 3) = 0x90; // nop |
| 164 *(call_target_address - 2) = 0x90; // nop | 166 *(call_target_address - 2) = 0x90; // nop |
| 165 Assembler::set_target_address_at(call_target_address, | 167 Assembler::set_target_address_at(call_target_address, |
| 166 replacement_code->entry()); | 168 replacement_code->entry()); |
| 167 } | 169 } |
| 168 | 170 |
| 169 | 171 |
| 170 void Deoptimizer::RevertStackCheckCodeAt(Address pc_after, | 172 void Deoptimizer::RevertStackCheckCodeAt(Address pc_after, |
| 171 Code* check_code, | 173 Code* check_code, |
| 172 Code* replacement_code) { | 174 Code* replacement_code) { |
| 173 Address call_target_address = pc_after - kPointerSize; | 175 Address call_target_address = pc_after - kIntSize; |
| 174 ASSERT(replacement_code->entry() == | 176 ASSERT(replacement_code->entry() == |
| 175 Assembler::target_address_at(call_target_address)); | 177 Assembler::target_address_at(call_target_address)); |
| 176 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to | 178 // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to |
| 177 // restore the conditional branch. | 179 // restore the conditional branch. |
| 178 ASSERT(*(call_target_address - 3) == 0x90 && // nop | 180 ASSERT(*(call_target_address - 3) == 0x90 && // nop |
| 179 *(call_target_address - 2) == 0x90 && // nop | 181 *(call_target_address - 2) == 0x90 && // nop |
| 180 *(call_target_address - 1) == 0xe8); // call | 182 *(call_target_address - 1) == 0xe8); // call |
| 181 *(call_target_address - 3) = 0x73; // jae | 183 *(call_target_address - 3) = 0x73; // jae |
| 182 *(call_target_address - 2) = 0x07; // offset | 184 *(call_target_address - 2) = 0x07; // offset |
| 183 Assembler::set_target_address_at(call_target_address, | 185 Assembler::set_target_address_at(call_target_address, |
| (...skipping 240 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 424 output_frame->SetFrameSlot(output_offset, value); | 426 output_frame->SetFrameSlot(output_offset, value); |
| 425 intptr_t fp_value = top_address + output_offset; | 427 intptr_t fp_value = top_address + output_offset; |
| 426 ASSERT(!is_bottommost || input_->GetRegister(ebp.code()) == fp_value); | 428 ASSERT(!is_bottommost || input_->GetRegister(ebp.code()) == fp_value); |
| 427 output_frame->SetFp(fp_value); | 429 output_frame->SetFp(fp_value); |
| 428 if (is_topmost) output_frame->SetRegister(ebp.code(), fp_value); | 430 if (is_topmost) output_frame->SetRegister(ebp.code(), fp_value); |
| 429 if (FLAG_trace_deopt) { | 431 if (FLAG_trace_deopt) { |
| 430 PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n", | 432 PrintF(" 0x%08x: [top + %d] <- 0x%08x ; caller's fp\n", |
| 431 fp_value, output_offset, value); | 433 fp_value, output_offset, value); |
| 432 } | 434 } |
| 433 | 435 |
| 434 // The context can be gotten from the function so long as we don't | 436 // For the bottommost output frame the context can be gotten from the input |
| 435 // optimize functions that need local contexts. | 437 // frame. For all subsequent output frames it can be gotten from the function |
| 438 // so long as we don't inline functions that need local contexts. |
| 436 output_offset -= kPointerSize; | 439 output_offset -= kPointerSize; |
| 437 input_offset -= kPointerSize; | 440 input_offset -= kPointerSize; |
| 438 value = reinterpret_cast<uint32_t>(function->context()); | 441 if (is_bottommost) { |
| 439 // The context for the bottommost output frame should also agree with the | 442 value = input_->GetFrameSlot(input_offset); |
| 440 // input frame. | 443 } else { |
| 441 ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value); | 444 value = reinterpret_cast<uint32_t>(function->context()); |
| 445 } |
| 442 output_frame->SetFrameSlot(output_offset, value); | 446 output_frame->SetFrameSlot(output_offset, value); |
| 443 if (is_topmost) output_frame->SetRegister(esi.code(), value); | 447 if (is_topmost) output_frame->SetRegister(esi.code(), value); |
| 444 if (FLAG_trace_deopt) { | 448 if (FLAG_trace_deopt) { |
| 445 PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n", | 449 PrintF(" 0x%08x: [top + %d] <- 0x%08x ; context\n", |
| 446 top_address + output_offset, output_offset, value); | 450 top_address + output_offset, output_offset, value); |
| 447 } | 451 } |
| 448 | 452 |
| 449 // The function was mentioned explicitly in the BEGIN_FRAME. | 453 // The function was mentioned explicitly in the BEGIN_FRAME. |
| 450 output_offset -= kPointerSize; | 454 output_offset -= kPointerSize; |
| 451 input_offset -= kPointerSize; | 455 input_offset -= kPointerSize; |
| (...skipping 202 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 654 } | 658 } |
| 655 __ bind(&done); | 659 __ bind(&done); |
| 656 } | 660 } |
| 657 | 661 |
| 658 #undef __ | 662 #undef __ |
| 659 | 663 |
| 660 | 664 |
| 661 } } // namespace v8::internal | 665 } } // namespace v8::internal |
| 662 | 666 |
| 663 #endif // V8_TARGET_ARCH_IA32 | 667 #endif // V8_TARGET_ARCH_IA32 |
| OLD | NEW |