OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 88 matching lines...)
99 function->ReplaceCode(function->shared()->code()); | 99 function->ReplaceCode(function->shared()->code()); |
100 | 100 |
101 if (FLAG_trace_deopt) { | 101 if (FLAG_trace_deopt) { |
102 PrintF("[forced deoptimization: "); | 102 PrintF("[forced deoptimization: "); |
103 function->PrintName(); | 103 function->PrintName(); |
104 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); | 104 PrintF(" / %x]\n", reinterpret_cast<uint32_t>(function)); |
105 } | 105 } |
106 } | 106 } |
107 | 107 |
108 | 108 |
109 void Deoptimizer::PatchStackCheckCode(Code* unoptimized_code, | 109 void Deoptimizer::PatchStackCheckAt(Address pc_after, |
110 Code* check_code, | 110 Code* check_code, |
111 Code* replacement_code) { | 111 Code* replacement_code) { |
112 // Iterate the unoptimized code and patch every stack check except at | 112 Address call_target_address = pc_after - kPointerSize; |
113 // the function entry. This code assumes the function entry stack | 113 ASSERT(check_code->entry() == |
114 // check appears first i.e., is not deferred or otherwise reordered. | 114 Assembler::target_address_at(call_target_address)); |
115 ASSERT(unoptimized_code->kind() == Code::FUNCTION); | 115 // The stack check code matches the pattern: |
116 bool first = true; | 116 // |
117 for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask); | 117 // cmp esp, <limit> |
118 !it.done(); | 118 // jae ok |
119 it.next()) { | 119 // call <stack guard> |
120 RelocInfo* rinfo = it.rinfo(); | 120 // test eax, <loop nesting depth> |
121 if (rinfo->target_address() == Code::cast(check_code)->entry()) { | 121 // ok: ... |
122 if (first) { | 122 // |
123 first = false; | 123 // We will patch away the branch so the code is: |
124 } else { | 124 // |
125 // The stack check code matches the pattern: | 125 // cmp esp, <limit> ;; Not changed |
126 // | 126 // nop |
127 // cmp esp, <limit> | 127 // nop |
128 // jae ok | 128 // call <on-stack replacement> |
129 // call <stack guard> | 129 // test eax, <loop nesting depth> |
130 // test eax, <loop nesting depth> | 130 // ok: |
131 // ok: ... | 131 ASSERT(*(call_target_address - 3) == 0x73 && // jae |
132 // | 132 *(call_target_address - 2) == 0x07 && // offset |
133 // We will patch away the branch so the code is: | 133 *(call_target_address - 1) == 0xe8); // call |
134 // | 134 *(call_target_address - 3) = 0x90; // nop |
135 // cmp esp, <limit> ;; Not changed | 135 *(call_target_address - 2) = 0x90; // nop |
136 // nop | 136 Assembler::set_target_address_at(call_target_address, |
137 // nop | 137 replacement_code->entry()); |
138 // call <on-stack replacement> | |
139 // test eax, <loop nesting depth> | |
140 // ok: | |
141 Address call_target_address = rinfo->pc(); | |
142 ASSERT(*(call_target_address - 3) == 0x73 && // jae | |
143 *(call_target_address - 2) == 0x07 && // offset | |
144 *(call_target_address - 1) == 0xe8); // call | |
145 *(call_target_address - 3) = 0x90; // nop | |
146 *(call_target_address - 2) = 0x90; // nop | |
147 rinfo->set_target_address(replacement_code->entry()); | |
148 } | |
149 } | |
150 } | |
151 } | 138 } |
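
The byte edit in the new PatchStackCheckAt is easier to follow outside of V8's Assembler machinery. Below is a minimal, self-contained C++ sketch of the same patch on a plain byte buffer, assuming an ia32-style layout where pc_after is the return address of the 5-byte "call rel32" and kPointerSize is 4. PatchStackCheckBytes and the hardcoded displacement are illustrative names only, not V8's API; the real code rewrites the operand through Assembler::set_target_address_at rather than a raw memcpy.

#include <cassert>
#include <cstdint>
#include <cstdio>
#include <cstring>

// Illustrative stand-in for PatchStackCheckAt's byte edit (not V8's API).
// pc_after is the address just past the 5-byte "call rel32"; the call's
// 4-byte displacement operand sits at pc_after - 4 (kPointerSize on ia32).
void PatchStackCheckBytes(uint8_t* pc_after, int32_t osr_displacement) {
  uint8_t* call_target_address = pc_after - 4;
  // The site must match the pattern described in the comments above:
  //   73 07         jae ok
  //   e8 <rel32>    call <stack guard>
  assert(call_target_address[-3] == 0x73 &&  // jae
         call_target_address[-2] == 0x07 &&  // offset
         call_target_address[-1] == 0xe8);   // call
  call_target_address[-3] = 0x90;  // nop: the branch is patched away,
  call_target_address[-2] = 0x90;  // nop: so the call now always runs.
  // Point the call at the on-stack-replacement entry (the real code
  // computes the relative displacement via set_target_address_at).
  std::memcpy(call_target_address, &osr_displacement,
              sizeof(osr_displacement));
}

int main() {
  uint8_t code[] = {0x3b, 0x25, 0, 0, 0, 0,   // cmp esp, <limit>
                    0x73, 0x07,               // jae ok
                    0xe8, 0, 0, 0, 0};        // call <stack guard>
  PatchStackCheckBytes(code + sizeof(code), 0x1234);
  assert(code[6] == 0x90 && code[7] == 0x90);
  std::printf("patched: branch replaced with nops\n");
  return 0;
}

The 0x73/0x07 check guards against patching a site that does not match the expected pattern, and replacing the two-byte jae with two one-byte nops keeps the code size unchanged, so no relocation of the surrounding instructions is needed.
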
152 | 139 |
153 | 140 |
154 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, | 141 void Deoptimizer::RevertStackCheckCode(Code* unoptimized_code, |
155 Code* check_code, | 142 Code* check_code, |
156 Code* replacement_code) { | 143 Code* replacement_code) { |
157 // Iterate the unoptimized code and revert all the patched stack checks. | 144 // Iterate the unoptimized code and revert all the patched stack checks. |
158 for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask); | 145 for (RelocIterator it(unoptimized_code, RelocInfo::kCodeTargetMask); |
159 !it.done(); | 146 !it.done(); |
160 it.next()) { | 147 it.next()) { |
(...skipping 480 matching lines...)
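
The body of RevertStackCheckCode is elided above, but per its comment it walks the relocation info and undoes each patched site. As a hedged sketch only, using the same illustrative byte-buffer conventions as before rather than the actual elided source, the inverse edit restores the branch bytes and re-targets the call at the stack-check stub:

#include <cassert>
#include <cstdint>
#include <cstring>

// Hypothetical inverse of the patch sketch above; illustrative only,
// not the elided V8 body. Restores the jae and re-targets the call.
void RevertStackCheckBytes(uint8_t* pc_after, int32_t guard_displacement) {
  uint8_t* call_target_address = pc_after - 4;  // kPointerSize == 4 on ia32
  assert(call_target_address[-3] == 0x90 &&  // nop (left by the patch)
         call_target_address[-2] == 0x90 &&  // nop
         call_target_address[-1] == 0xe8);   // call opcode is untouched
  call_target_address[-3] = 0x73;            // jae
  call_target_address[-2] = 0x07;            // original branch offset
  std::memcpy(call_target_address, &guard_displacement,
              sizeof(guard_displacement));   // back to <stack guard>
}

int main() {
  uint8_t code[] = {0x90, 0x90,                     // nops from the patch
                    0xe8, 0x34, 0x12, 0x00, 0x00};  // call <osr entry>
  RevertStackCheckBytes(code + sizeof(code), 0);
  assert(code[0] == 0x73 && code[1] == 0x07);       // branch restored
  return 0;
}
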
641 } | 628 } |
642 __ bind(&done); | 629 __ bind(&done); |
643 } | 630 } |
644 | 631 |
645 #undef __ | 632 #undef __ |
646 | 633 |
647 | 634 |
648 } } // namespace v8::internal | 635 } } // namespace v8::internal |
649 | 636 |
650 #endif // V8_TARGET_ARCH_IA32 | 637 #endif // V8_TARGET_ARCH_IA32 |