| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 98 matching lines...) |
| 109 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); | 109 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); |
| 110 #ifdef DEBUG | 110 #ifdef DEBUG |
| 111 if (FLAG_print_code) { | 111 if (FLAG_print_code) { |
| 112 code->PrintLn(); | 112 code->PrintLn(); |
| 113 } | 113 } |
| 114 #endif | 114 #endif |
| 115 } | 115 } |
| 116 } | 116 } |
| 117 | 117 |
| 118 | 118 |
| 119 void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code, | 119 // This structure comes from FullCodeGenerator::EmitBackEdgeBookkeeping. |
| 120 // The back edge bookkeeping code matches the pattern: |
| 121 // |
| 122 // sltu at, sp, t0 / slt at, a3, zero_reg (in case of count based interrupts) |
| 123 // beq at, zero_reg, ok |
| 124 // lui t9, <interrupt stub address> upper |
| 125 // ori t9, <interrupt stub address> lower |
| 126 // jalr t9 |
| 127 // nop |
| 128 // ok-label ----- pc_after points here |
| 129 // |
| 130 // We patch the code to the following form: |
| 131 // |
| 132 // addiu at, zero_reg, 1 |
| 133 // beq at, zero_reg, ok ;; Not changed |
| 134 // lui t9, <on-stack replacement address> upper |
| 135 // ori t9, <on-stack replacement address> lower |
| 136 // jalr t9 ;; Not changed |
| 137 // nop ;; Not changed |
| 138 // ok-label ----- pc_after points here |
| 139 |
| 140 void Deoptimizer::PatchInterruptCodeAt(Code* unoptimized_code, |
| 120 Address pc_after, | 141 Address pc_after, |
| 121 Code* check_code, | 142 Code* interrupt_code, |
| 122 Code* replacement_code) { | 143 Code* replacement_code) { |
| 123 const int kInstrSize = Assembler::kInstrSize; | 144 ASSERT(!InterruptCodeIsPatched(unoptimized_code, |
| 124 // This structure comes from FullCodeGenerator::EmitBackEdgeBookkeeping. | 145 pc_after, |
| 125 // The call of the stack guard check has the following form: | 146 interrupt_code, |
| 126 // sltu at, sp, t0 / slt at, a3, zero_reg (in case of count based interrupts) | 147 replacement_code)); |
| 127 // beq at, zero_reg, ok | 148 static const int kInstrSize = Assembler::kInstrSize; |
| 128 // lui t9, <stack guard address> upper | |
| 129 // ori t9, <stack guard address> lower | |
| 130 // jalr t9 | |
| 131 // nop | |
| 132 // ----- pc_after points here | |
| 133 | |
| 134 ASSERT(Assembler::IsBeq(Assembler::instr_at(pc_after - 5 * kInstrSize))); | |
| 135 | |
| 136 // Replace the sltu instruction with load-imm 1 to at, so beq is not taken. | 149 // Replace the sltu instruction with load-imm 1 to at, so beq is not taken. |
| 137 CodePatcher patcher(pc_after - 6 * kInstrSize, 1); | 150 CodePatcher patcher(pc_after - 6 * kInstrSize, 1); |
| 138 patcher.masm()->addiu(at, zero_reg, 1); | 151 patcher.masm()->addiu(at, zero_reg, 1); |
| 139 | |
| 140 // Replace the stack check address in the load-immediate (lui/ori pair) | 152 // Replace the stack check address in the load-immediate (lui/ori pair) |
| 141 // with the entry address of the replacement code. | 153 // with the entry address of the replacement code. |
| 142 ASSERT(reinterpret_cast<uint32_t>( | |
| 143 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == | |
| 144 reinterpret_cast<uint32_t>(check_code->entry())); | |
| 145 Assembler::set_target_address_at(pc_after - 4 * kInstrSize, | 154 Assembler::set_target_address_at(pc_after - 4 * kInstrSize, |
| 146 replacement_code->entry()); | 155 replacement_code->entry()); |
| 147 | 156 |
| 148 // We patched the code to the following form: | |
| 149 // addiu at, zero_reg, 1 | |
| 150 // beq at, zero_reg, ok ;; Not changed | |
| 151 // lui t9, <on-stack replacement address> upper | |
| 152 // ori t9, <on-stack replacement address> lower | |
| 153 // jalr t9 ;; Not changed | |
| 154 // nop ;; Not changed | |
| 155 // ----- pc_after points here | |
| 156 | |
| 157 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 157 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 158 unoptimized_code, pc_after - 4 * kInstrSize, replacement_code); | 158 unoptimized_code, pc_after - 4 * kInstrSize, replacement_code); |
| 159 } | 159 } |
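
As a minimal illustration of the layout PatchInterruptCodeAt depends on (plain standalone C++ with placeholder mnemonics, not V8 code): the back edge sequence is six 4-byte MIPS instructions ending at pc_after, so the compare being replaced sits at pc_after - 6 * kInstrSize and the lui/ori pair whose call target is rewritten starts at pc_after - 4 * kInstrSize.

  // Illustrative only: models the six-instruction back edge sequence as an
  // array and shows which slots the patcher touches. The mnemonics are
  // placeholders, not real MIPS encodings.
  #include <cstdio>

  int main() {
    const int kInstrSize = 4;  // MIPS instructions are 4 bytes wide.
    const char* sequence[6] = { "sltu/slt", "beq", "lui", "ori", "jalr", "nop" };
    // pc_after points just past the last instruction, at the "ok" label.
    const int pc_after = 6 * kInstrSize;

    // Slot rewritten to "addiu at, zero_reg, 1" so the beq is never taken.
    int compare_slot = (pc_after - 6 * kInstrSize) / kInstrSize;  // index 0
    // First slot of the lui/ori pair whose loaded address is replaced.
    int target_slot = (pc_after - 4 * kInstrSize) / kInstrSize;   // index 2

    printf("compare instruction to patch: %s\n", sequence[compare_slot]);
    printf("call target load starts at:   %s\n", sequence[target_slot]);
    return 0;
  }
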
| 160 | 160 |
| 161 | 161 |
| 162 void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code, | 162 void Deoptimizer::RevertInterruptCodeAt(Code* unoptimized_code, |
| 163 Address pc_after, | 163 Address pc_after, |
| 164 Code* check_code, | 164 Code* interrupt_code, |
| 165 Code* replacement_code) { | 165 Code* replacement_code) { |
| 166 // Exact opposite of the function above. | 166 ASSERT(InterruptCodeIsPatched(unoptimized_code, |
| 167 const int kInstrSize = Assembler::kInstrSize; | 167 pc_after, |
| 168 ASSERT(Assembler::IsAddImmediate( | 168 interrupt_code, |
| 169 Assembler::instr_at(pc_after - 6 * kInstrSize))); | 169 replacement_code)); |
| 170 ASSERT(Assembler::IsBeq(Assembler::instr_at(pc_after - 5 * kInstrSize))); | 170 static const int kInstrSize = Assembler::kInstrSize; |
| 171 | |
| 172 // Restore the sltu instruction so beq can be taken again. | 171 // Restore the sltu instruction so beq can be taken again. |
| 173 CodePatcher patcher(pc_after - 6 * kInstrSize, 1); | 172 CodePatcher patcher(pc_after - 6 * kInstrSize, 1); |
| 174 patcher.masm()->slt(at, a3, zero_reg); | 173 patcher.masm()->slt(at, a3, zero_reg); |
| 174 // Restore the original call address. |
| 175 Assembler::set_target_address_at(pc_after - 4 * kInstrSize, |
| 176 interrupt_code->entry()); |
| 175 | 177 |
| 176 // Replace the on-stack replacement address in the load-immediate (lui/ori | 178 interrupt_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 177 // pair) with the entry address of the normal stack-check code. | 179 unoptimized_code, pc_after - 4 * kInstrSize, interrupt_code); |
| 178 ASSERT(reinterpret_cast<uint32_t>( | |
| 179 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == | |
| 180 reinterpret_cast<uint32_t>(replacement_code->entry())); | |
| 181 Assembler::set_target_address_at(pc_after - 4 * kInstrSize, | |
| 182 check_code->entry()); | |
| 183 | |
| 184 check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | |
| 185 unoptimized_code, pc_after - 4 * kInstrSize, check_code); | |
| 186 } | 180 } |
| 187 | 181 |
| 188 | 182 |
| 183 #ifdef DEBUG |
| 184 bool Deoptimizer::InterruptCodeIsPatched(Code* unoptimized_code, |
| 185 Address pc_after, |
| 186 Code* interrupt_code, |
| 187 Code* replacement_code) { |
| 188 static const int kInstrSize = Assembler::kInstrSize; |
| 189 ASSERT(Assembler::IsBeq(Assembler::instr_at(pc_after - 5 * kInstrSize))); |
| 190 if (Assembler::IsAddImmediate( |
| 191 Assembler::instr_at(pc_after - 6 * kInstrSize))) { |
| 192 ASSERT(reinterpret_cast<uint32_t>( |
| 193 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == |
| 194 reinterpret_cast<uint32_t>(replacement_code->entry())); |
| 195 return true; |
| 196 } else { |
| 197 ASSERT(reinterpret_cast<uint32_t>( |
| 198 Assembler::target_address_at(pc_after - 4 * kInstrSize)) == |
| 199 reinterpret_cast<uint32_t>(interrupt_code->entry())); |
| 200 return false; |
| 201 } |
| 202 } |
| 203 #endif // DEBUG |
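
A rough sketch of the round-trip invariant these functions maintain (stand-in types only, not the real V8 API): PatchInterruptCodeAt requires InterruptCodeIsPatched to be false and makes it true, RevertInterruptCodeAt requires true and makes it false, and the predicate itself decides by looking at the first instruction of the back edge sequence (addiu after patching, sltu/slt otherwise) while asserting that the call target matches.

  // Illustrative model of the patch/revert round trip; "BackEdge" is a
  // stand-in for the patched code site, not a V8 type.
  #include <cassert>

  struct BackEdge {
    bool first_instr_is_addiu;  // addiu after patching, sltu/slt before.
    const void* call_target;    // interrupt stub entry or OSR builtin entry.
  };

  static bool IsPatched(const BackEdge& e, const void* interrupt,
                        const void* osr) {
    if (e.first_instr_is_addiu) {
      assert(e.call_target == osr);
      return true;
    }
    assert(e.call_target == interrupt);
    return false;
  }

  static void Patch(BackEdge* e, const void* interrupt, const void* osr) {
    assert(!IsPatched(*e, interrupt, osr));
    e->first_instr_is_addiu = true;  // addiu at, zero_reg, 1
    e->call_target = osr;            // lui/ori now load the OSR entry.
  }

  static void Revert(BackEdge* e, const void* interrupt, const void* osr) {
    assert(IsPatched(*e, interrupt, osr));
    e->first_instr_is_addiu = false;  // restore the sltu/slt compare
    e->call_target = interrupt;       // lui/ori load the interrupt stub again.
  }

  int main() {
    int interrupt_stub = 0, osr_builtin = 0;
    BackEdge e = { false, &interrupt_stub };
    Patch(&e, &interrupt_stub, &osr_builtin);
    Revert(&e, &interrupt_stub, &osr_builtin);
    return IsPatched(e, &interrupt_stub, &osr_builtin) ? 1 : 0;
  }
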
| 204 |
| 205 |
| 189 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { | 206 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { |
| 190 ByteArray* translations = data->TranslationByteArray(); | 207 ByteArray* translations = data->TranslationByteArray(); |
| 191 int length = data->DeoptCount(); | 208 int length = data->DeoptCount(); |
| 192 for (int i = 0; i < length; i++) { | 209 for (int i = 0; i < length; i++) { |
| 193 if (data->AstId(i) == ast_id) { | 210 if (data->AstId(i) == ast_id) { |
| 194 TranslationIterator it(translations, data->TranslationIndex(i)->value()); | 211 TranslationIterator it(translations, data->TranslationIndex(i)->value()); |
| 195 int value = it.Next(); | 212 int value = it.Next(); |
| 196 ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value)); | 213 ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value)); |
| 197 // Read the number of frames. | 214 // Read the number of frames. |
| 198 value = it.Next(); | 215 value = it.Next(); |
| (...skipping 631 matching lines...) |
| 830 } | 847 } |
| 831 | 848 |
| 832 ASSERT_EQ(masm()->SizeOfCodeGeneratedSince(&table_start), | 849 ASSERT_EQ(masm()->SizeOfCodeGeneratedSince(&table_start), |
| 833 count() * table_entry_size_); | 850 count() * table_entry_size_); |
| 834 } | 851 } |
| 835 | 852 |
| 836 #undef __ | 853 #undef __ |
| 837 | 854 |
| 838 | 855 |
| 839 } } // namespace v8::internal | 856 } } // namespace v8::internal |