| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 87 matching lines...) |
| 98 // We will patch away the branch so the code is: | 98 // We will patch away the branch so the code is: |
| 99 // | 99 // |
| 100 // add <profiling_counter>, <-delta> ;; Not changed | 100 // add <profiling_counter>, <-delta> ;; Not changed |
| 101 // nop | 101 // nop |
| 102 // nop | 102 // nop |
| 103 // call <on-stack replacement> | 103 // call <on-stack replacement> |
| 104 // ok: | 104 // ok: |
| 105 | 105 |
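
The sequence in the comment above is easier to follow at the byte level. A minimal sketch, assuming the usual x86-64 encodings behind the named constants (jns rel8 = 0x79, two-byte nop = 0x66 0x90, call rel32 = 0xe8; the concrete kJnsOffset value is left symbolic):

    #include <cstdint>

    // C-level analogue of the unpatched check: the profiling counter is
    // decremented, and the interrupt call runs only once it drops below
    // zero (sign flag set, so the 'jns ok' branch falls through).
    //
    // Patching for OSR overwrites the two jns bytes with a two-byte nop,
    // making the call unconditional:
    //
    //   before: 79 <off>  e8 <rel32>   ; jns ok; call <interrupt>
    //   after:  66 90     e8 <rel32>   ; nop;    call <on-stack replacement>
    void PatchBranchToNops(uint8_t* call_target_address) {
      call_target_address[-3] = 0x66;  // kNopByteOne (assumed encoding)
      call_target_address[-2] = 0x90;  // kNopByteTwo (assumed encoding)
      // call_target_address[-1] stays 0xe8 (call); only its 32-bit operand
      // is rewritten, via Assembler::set_target_address_at below.
    }
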
| 106 void Deoptimizer::PatchInterruptCodeAt(Code* unoptimized_code, | 106 void Deoptimizer::PatchInterruptCodeAt(Code* unoptimized_code, |
| 107 Address pc_after, | 107 Address pc_after, |
| 108 Code* interrupt_code, | |
| 109 Code* replacement_code) { | 108 Code* replacement_code) { |
| 110 ASSERT(!InterruptCodeIsPatched(unoptimized_code, | |
| 111 pc_after, | |
| 112 interrupt_code, | |
| 113 replacement_code)); | |
| 114 // Turn the jump into nops. | 109 // Turn the jump into nops. |
| 115 Address call_target_address = pc_after - kIntSize; | 110 Address call_target_address = pc_after - kIntSize; |
| 116 *(call_target_address - 3) = kNopByteOne; | 111 *(call_target_address - 3) = kNopByteOne; |
| 117 *(call_target_address - 2) = kNopByteTwo; | 112 *(call_target_address - 2) = kNopByteTwo; |
| 118 // Replace the call address. | 113 // Replace the call address. |
| 119 Assembler::set_target_address_at(call_target_address, | 114 Assembler::set_target_address_at(call_target_address, |
| 120 replacement_code->entry()); | 115 replacement_code->entry()); |
| 121 | 116 |
| 122 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 117 unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 123 unoptimized_code, call_target_address, replacement_code); | 118 unoptimized_code, call_target_address, replacement_code); |
| 124 } | 119 } |
| 125 | 120 |
| 126 | 121 |
| 127 void Deoptimizer::RevertInterruptCodeAt(Code* unoptimized_code, | 122 void Deoptimizer::RevertInterruptCodeAt(Code* unoptimized_code, |
| 128 Address pc_after, | 123 Address pc_after, |
| 129 Code* interrupt_code, | 124 Code* interrupt_code) { |
| 130 Code* replacement_code) { | |
| 131 ASSERT(InterruptCodeIsPatched(unoptimized_code, | |
| 132 pc_after, | |
| 133 interrupt_code, | |
| 134 replacement_code)); | |
| 135 // Restore the original jump. | 125 // Restore the original jump. |
| 136 Address call_target_address = pc_after - kIntSize; | 126 Address call_target_address = pc_after - kIntSize; |
| 137 *(call_target_address - 3) = kJnsInstruction; | 127 *(call_target_address - 3) = kJnsInstruction; |
| 138 *(call_target_address - 2) = kJnsOffset; | 128 *(call_target_address - 2) = kJnsOffset; |
| 139 // Restore the original call address. | 129 // Restore the original call address. |
| 140 Assembler::set_target_address_at(call_target_address, | 130 Assembler::set_target_address_at(call_target_address, |
| 141 interrupt_code->entry()); | 131 interrupt_code->entry()); |
| 142 | 132 |
| 143 interrupt_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( | 133 interrupt_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
| 144 unoptimized_code, call_target_address, interrupt_code); | 134 unoptimized_code, call_target_address, interrupt_code); |
| 145 } | 135 } |
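
PatchInterruptCodeAt and RevertInterruptCodeAt are exact inverses, and both end with a call to RecordCodeTargetPatch. That final call presumably serves as a write barrier for incremental marking: rewriting the call operand installs a new reference from unoptimized_code to another Code object, and the incremental marker has to be notified so the new target is not missed mid-marking.
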
| 146 | 136 |
| 147 | 137 |
| 148 #ifdef DEBUG | 138 #ifdef DEBUG |
| 149 bool Deoptimizer::InterruptCodeIsPatched(Code* unoptimized_code, | 139 Deoptimizer::InterruptPatchState Deoptimizer::GetInterruptPatchState( |
| 150 Address pc_after, | 140 Isolate* isolate, |
| 151 Code* interrupt_code, | 141 Code* unoptimized_code, |
| 152 Code* replacement_code) { | 142 Address pc_after) { |
| 153 Address call_target_address = pc_after - kIntSize; | 143 Address call_target_address = pc_after - kIntSize; |
| 154 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); | 144 ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
| 155 if (*(call_target_address - 3) == kNopByteOne) { | 145 if (*(call_target_address - 3) == kNopByteOne) { |
| 156 ASSERT(replacement_code->entry() == | |
| 157 Assembler::target_address_at(call_target_address)); | |
| 158 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); | 146 ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); |
| 159 return true; | 147 Code* osr_builtin = |
| 148 isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
| 149 ASSERT_EQ(osr_builtin->entry(), |
| 150 Assembler::target_address_at(call_target_address)); |
| 151 return PATCHED_FOR_OSR; |
| 160 } else { | 152 } else { |
| 153 // Get the interrupt stub code object to match against from cache. |
| 154 Code* interrupt_code = NULL; |
| 155 InterruptStub stub; |
| 156 if (!stub.FindCodeInCache(&interrupt_code, isolate)) UNREACHABLE(); |
| 161 ASSERT_EQ(interrupt_code->entry(), | 157 ASSERT_EQ(interrupt_code->entry(), |
| 162 Assembler::target_address_at(call_target_address)); | 158 Assembler::target_address_at(call_target_address)); |
| 163 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); | 159 ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); |
| 164 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); | 160 ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); |
| 165 return false; | 161 return NOT_PATCHED; |
| 166 } | 162 } |
| 167 } | 163 } |
| 168 #endif // DEBUG | 164 #endif // DEBUG |
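
The ASSERTs deleted from the two patching functions above are presumably now expressed by callers against the new InterruptPatchState enum. A hedged sketch of that caller shape (the wrapper function is hypothetical; PATCHED_FOR_OSR and NOT_PATCHED come from this patch):

    #ifdef DEBUG
    // Hypothetical debug-mode wrapper mirroring the deleted in-function
    // ASSERTs: unpatched before patching, patched-for-OSR afterwards.
    static void PatchWithChecks(Isolate* isolate, Code* unoptimized_code,
                                Address pc_after, Code* replacement_code) {
      ASSERT_EQ(Deoptimizer::NOT_PATCHED,
                Deoptimizer::GetInterruptPatchState(isolate, unoptimized_code,
                                                    pc_after));
      Deoptimizer::PatchInterruptCodeAt(unoptimized_code, pc_after,
                                        replacement_code);
      ASSERT_EQ(Deoptimizer::PATCHED_FOR_OSR,
                Deoptimizer::GetInterruptPatchState(isolate, unoptimized_code,
                                                    pc_after));
    }
    #endif  // DEBUG
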
| 169 | 165 |
| 170 | 166 |
| 171 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { | 167 static int LookupBailoutId(DeoptimizationInputData* data, BailoutId ast_id) { |
| 172 ByteArray* translations = data->TranslationByteArray(); | 168 ByteArray* translations = data->TranslationByteArray(); |
| 173 int length = data->DeoptCount(); | 169 int length = data->DeoptCount(); |
| 174 for (int i = 0; i < length; i++) { | 170 for (int i = 0; i < length; i++) { |
| 175 if (data->AstId(i) == ast_id) { | 171 if (data->AstId(i) == ast_id) { |
| (...skipping 411 matching lines...) |
| 587 SetFrameSlot(offset, value); | 583 SetFrameSlot(offset, value); |
| 588 } | 584 } |
| 589 | 585 |
| 590 | 586 |
| 591 #undef __ | 587 #undef __ |
| 592 | 588 |
| 593 | 589 |
| 594 } } // namespace v8::internal | 590 } } // namespace v8::internal |
| 595 | 591 |
| 596 #endif // V8_TARGET_ARCH_X64 | 592 #endif // V8_TARGET_ARCH_X64 |