| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 83 matching lines...) | (...skipping 83 matching lines...) |
| 94 // We patch the code to the following form: | 94 // We patch the code to the following form: |
| 95 // | 95 // |
| 96 // <decrement profiling counter> | 96 // <decrement profiling counter> |
| 97 // e1 a0 00 00 mov r0, r0 (NOP) | 97 // e1 a0 00 00 mov r0, r0 (NOP) |
| 98 // e5 9f c? ?? ldr ip, [pc, <on-stack replacement address>] | 98 // e5 9f c? ?? ldr ip, [pc, <on-stack replacement address>] |
| 99 // e1 2f ff 3c blx ip | 99 // e1 2f ff 3c blx ip |
| 100 // ok-label | 100 // ok-label |
| 101 | 101 |
| 102 void Deoptimizer::PatchInterruptCodeAt(Code* unoptimized_code, | 102 void Deoptimizer::PatchInterruptCodeAt(Code* unoptimized_code, |
| 103 Address pc_after, | 103 Address pc_after, |
| 104 Code* interrupt_code, | |
| 105 Code* replacement_code) { | 104 Code* replacement_code) { |
| 106 ASSERT(!InterruptCodeIsPatched(unoptimized_code, | |
| 107 pc_after, | |
| 108 interrupt_code, | |
| 109 replacement_code)); | |
| 110 static const int kInstrSize = Assembler::kInstrSize; | 105 static const int kInstrSize = Assembler::kInstrSize; |
| 111 // Turn the jump into nops. | 106 // Turn the jump into nops. |
| 112 CodePatcher patcher(pc_after - 3 * kInstrSize, 1); | 107 CodePatcher patcher(pc_after - 3 * kInstrSize, 1); |
| 113 patcher.masm()->nop(); | 108 patcher.masm()->nop(); |
| 114 // Replace the call address. | 109 // Replace the call address. |
| 115 uint32_t interrupt_address_offset = Memory::uint16_at(pc_after - | 110 uint32_t interrupt_address_offset = Memory::uint16_at(pc_after - |
| 116 2 * kInstrSize) & 0xfff; | 111 2 * kInstrSize) & 0xfff; |
| 117 Address interrupt_address_pointer = pc_after + interrupt_address_offset; | 112 Address interrupt_address_pointer = pc_after + interrupt_address_offset; |
| 118 Memory::uint32_at(interrupt_address_pointer) = | 113 Memory::uint32_at(interrupt_address_pointer) = |
| 119 reinterpret_cast<uint32_t>(replacement_code->entry()); | 114 reinterpret_cast<uint32_t>(replacement_code->entry()); |
| 120 | 115 |
| 121 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 116 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 122 unoptimized_code, pc_after - 2 * kInstrSize, replacement_code); | 117 unoptimized_code, pc_after - 2 * kInstrSize, replacement_code); |
| 123 } | 118 } |
| 124 | 119 |
| 125 | 120 |
| 126 void Deoptimizer::RevertInterruptCodeAt(Code* unoptimized_code, | 121 void Deoptimizer::RevertInterruptCodeAt(Code* unoptimized_code, |
| 127 Address pc_after, | 122 Address pc_after, |
| 128 Code* interrupt_code, | 123 Code* interrupt_code) { |
| 129 Code* replacement_code) { | |
| 130 ASSERT(InterruptCodeIsPatched(unoptimized_code, | |
| 131 pc_after, | |
| 132 interrupt_code, | |
| 133 replacement_code)); | |
| 134 static const int kInstrSize = Assembler::kInstrSize; | 124 static const int kInstrSize = Assembler::kInstrSize; |
| 135 // Restore the original jump. | 125 // Restore the original jump. |
| 136 CodePatcher patcher(pc_after - 3 * kInstrSize, 1); | 126 CodePatcher patcher(pc_after - 3 * kInstrSize, 1); |
| 137 patcher.masm()->b(4 * kInstrSize, pl); // ok-label is 4 instructions later. | 127 patcher.masm()->b(4 * kInstrSize, pl); // ok-label is 4 instructions later. |
| 138 ASSERT_EQ(kBranchBeforeInterrupt, | 128 ASSERT_EQ(kBranchBeforeInterrupt, |
| 139 Memory::int32_at(pc_after - 3 * kInstrSize)); | 129 Memory::int32_at(pc_after - 3 * kInstrSize)); |
| 140 // Restore the original call address. | 130 // Restore the original call address. |
| 141 uint32_t interrupt_address_offset = Memory::uint16_at(pc_after - | 131 uint32_t interrupt_address_offset = Memory::uint16_at(pc_after - |
| 142 2 * kInstrSize) & 0xfff; | 132 2 * kInstrSize) & 0xfff; |
| 143 Address interrupt_address_pointer = pc_after + interrupt_address_offset; | 133 Address interrupt_address_pointer = pc_after + interrupt_address_offset; |
| 144 Memory::uint32_at(interrupt_address_pointer) = | 134 Memory::uint32_at(interrupt_address_pointer) = |
| 145 reinterpret_cast<uint32_t>(interrupt_code->entry()); | 135 reinterpret_cast<uint32_t>(interrupt_code->entry()); |
| 146 | 136 |
| 147 interrupt_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 137 interrupt_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 148 unoptimized_code, pc_after - 2 * kInstrSize, interrupt_code); | 138 unoptimized_code, pc_after - 2 * kInstrSize, interrupt_code); |
| 149 } | 139 } |
| 150 | 140 |
| 151 | 141 |
| 152 #ifdef DEBUG | 142 #ifdef DEBUG |
| 153 bool Deoptimizer::InterruptCodeIsPatched(Code* unoptimized_code, | 143 Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState( |
| 154 Address pc_after, | 144 Isolate* isolate, |
| 155 Code* interrupt_code, | 145 Code* unoptimized_code, |
| 156 Code* replacement_code) { | 146 Address pc_after) { |
| 157 static const int kInstrSize = Assembler::kInstrSize; | 147 static const int kInstrSize = Assembler::kInstrSize; |
| 158 ASSERT(Memory::int32_at(pc_after - kInstrSize) == kBlxIp); | 148 ASSERT(Memory::int32_at(pc_after - kInstrSize) == kBlxIp); |
| 159 | 149 |
| 160 uint32_t interrupt_address_offset = | 150 uint32_t interrupt_address_offset = |
| 161 Memory::uint16_at(pc_after - 2 * kInstrSize) & 0xfff; | 151 Memory::uint16_at(pc_after - 2 * kInstrSize) & 0xfff; |
| 162 Address interrupt_address_pointer = pc_after + interrupt_address_offset; | 152 Address interrupt_address_pointer = pc_after + interrupt_address_offset; |
| 163 | 153 |
| 164 if (Assembler::IsNop(Assembler::instr_at(pc_after - 3 * kInstrSize))) { | 154 if (Assembler::IsNop(Assembler::instr_at(pc_after - 3 * kInstrSize))) { |
| 165 ASSERT(Assembler::IsLdrPcImmediateOffset( | 155 ASSERT(Assembler::IsLdrPcImmediateOffset( |
| 166 Assembler::instr_at(pc_after - 2 * kInstrSize))); | 156 Assembler::instr_at(pc_after - 2 * kInstrSize))); |
| 167 ASSERT(reinterpret_cast<uint32_t>(replacement_code->entry()) == | 157 Code* osr_builtin = |
| | 158 isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
| | 159 ASSERT(reinterpret_cast<uint32_t>(osr_builtin->entry()) == |
| 168 Memory::uint32_at(interrupt_address_pointer)); | 160 Memory::uint32_at(interrupt_address_pointer)); |
| 169 return true; | 161 return PATCHED_FOR_OSR; |
| 170 } else { | 162 } else { |
| | 163 // Get the interrupt stub code object to match against from cache. |
| | 164 Code* interrupt_code = NULL; |
| | 165 InterruptStub stub; |
| | 166 if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE(); |
| 171 ASSERT(Assembler::IsLdrPcImmediateOffset( | 167 ASSERT(Assembler::IsLdrPcImmediateOffset( |
| 172 Assembler::instr_at(pc_after - 2 * kInstrSize))); | 168 Assembler::instr_at(pc_after - 2 * kInstrSize))); |
| 173 ASSERT_EQ(kBranchBeforeInterrupt, | 169 ASSERT_EQ(kBranchBeforeInterrupt, |
| 174 Memory::int32_at(pc_after - 3 * kInstrSize)); | 170 Memory::int32_at(pc_after - 3 * kInstrSize)); |
| 175 ASSERT(reinterpret_cast<uint32_t>(interrupt_code->entry()) == | 171 ASSERT(reinterpret_cast<uint32_t>(interrupt_code->entry()) == |
| 176 Memory::uint32_at(interrupt_address_pointer)); | 172 Memory::uint32_at(interrupt_address_pointer)); |
| 177 return false; | 173 return NOT_PATCHED; |
| 178 } | 174 } |
| 179 } | 175 } |
| 180 #endif // DEBUG | 176 #endif // DEBUG |
| 181 | 177 |
| 182 | 178 |
| 183 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { | 179 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { |
| 184 ByteArray* translations = data->TranslationByteArray(); | 180 ByteArray* translations = data->TranslationByteArray(); |
| 185 int length = data->DeoptCount(); | 181 int length = data->DeoptCount(); |
| 186 for (int i = 0; i < length; i++) { | 182 for (int i = 0; i < length; i++) { |
| 187 if (data->AstId(i) == ast_id) { | 183 if (data->AstId(i) == ast_id) { |
| (...skipping 421 matching lines...) | (...skipping 421 matching lines...) |
| 609 | 605 |
| 610 | 606 |
| 611 void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { | 607 void FrameDescription::SetCallerFp(unsigned offset, intptr_t value) { |
| 612 SetFrameSlot(offset, value); | 608 SetFrameSlot(offset, value); |
| 613 } | 609 } |
| 614 | 610 |
| 615 | 611 |
| 616 #undef __ | 612 #undef __ |
| 617 | 613 |
| 618 } } // namespace v8::internal | 614 } } // namespace v8::internal |
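
For readers following the `& 0xfff` arithmetic in Patch/Revert above: both paths locate the constant-pool slot addressed by the pc-relative `ldr ip, [pc, #imm12]` that sits two instructions before `pc_after`. The following is a minimal standalone sketch of that address calculation, written for this review rather than taken from the CL; the helper name `ConstantPoolSlot` is made up.

```cpp
#include <cstdint>

// Sketch: decode the imm12 offset of "ldr ip, [pc, #imm12]" (bytes e5 9f c? ??)
// and compute the address of the constant-pool slot it loads from.
// 'ldr_address' is the address of the ldr instruction itself, i.e.
// pc_after - 2 * kInstrSize in the code above.
static uint32_t* ConstantPoolSlot(uint8_t* ldr_address) {
  // The low halfword of the little-endian instruction word holds Rd (bits
  // 15:12) and imm12 (bits 11:0); masking with 0xfff keeps just the offset,
  // mirroring Memory::uint16_at(...) & 0xfff in the patch code.
  uint32_t imm12 = *reinterpret_cast<uint16_t*>(ldr_address) & 0xfff;
  // ARM reads PC as the instruction address plus 8, so the slot lives at
  // ldr_address + 8 + imm12.
  return reinterpret_cast<uint32_t*>(ldr_address + 8 + imm12);
}
```

Since the ldr is 8 bytes (two instructions) before `pc_after`, that address simplifies to `pc_after + imm12`, which is exactly what `interrupt_address_pointer` computes before the slot is rewritten to the OSR entry or back to the interrupt stub entry.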
| OLD | NEW |
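On the interface change itself: `PatchInterruptCodeAt` no longer takes `interrupt_code`, `RevertInterruptCodeAt` no longer takes `replacement_code`, and the DEBUG-only check now returns an `InterruptPatchState` instead of a bool. A rough sketch of how a debug-mode caller could drive the new shape follows; the wrapper name `CheckAndPatch` is hypothetical, and only the Deoptimizer methods, the enum values, and the `Builtins::kOnStackReplacement` lookup are taken from the diff.

```cpp
#include "v8.h"           // assumes the usual v8::internal headers
#include "deoptimizer.h"

namespace v8 {
namespace internal {

#ifdef DEBUG
// Hypothetical caller: verify the back edge is unpatched, patch it for OSR,
// then verify the state flipped, using the new state-query interface.
static void CheckAndPatch(Isolate* isolate,
                          Code* unoptimized_code,
                          Address pc_after) {
  ASSERT_EQ(Deoptimizer::NOT_PATCHED,
            Deoptimizer::GetInterruptPatchState(isolate, unoptimized_code,
                                                pc_after));
  Code* osr = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
  Deoptimizer::PatchInterruptCodeAt(unoptimized_code, pc_after, osr);
  ASSERT_EQ(Deoptimizer::PATCHED_FOR_OSR,
            Deoptimizer::GetInterruptPatchState(isolate, unoptimized_code,
                                                pc_after));
}
#endif  // DEBUG

} }  // namespace v8::internal
```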