OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 179 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
190 | 190 |
191 // Continue with next deoptimization entry. | 191 // Continue with next deoptimization entry. |
192 current_entry = next_entry; | 192 current_entry = next_entry; |
193 entry_pc = next_pc; | 193 entry_pc = next_pc; |
194 } | 194 } |
195 | 195 |
196 #ifdef DEBUG | 196 #ifdef DEBUG |
197 // Destroy the code which is not supposed to run again. | 197 // Destroy the code which is not supposed to run again. |
198 ZapCodeRange(previous_pc, jump_table_address); | 198 ZapCodeRange(previous_pc, jump_table_address); |
199 #endif | 199 #endif |
| 200 Isolate* isolate = code->GetIsolate(); |
200 | 201 |
201 // Add the deoptimizing code to the list. | 202 // Add the deoptimizing code to the list. |
202 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); | 203 DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code); |
203 DeoptimizerData* data = code->GetIsolate()->deoptimizer_data(); | 204 DeoptimizerData* data = isolate->deoptimizer_data(); |
204 node->set_next(data->deoptimizing_code_list_); | 205 node->set_next(data->deoptimizing_code_list_); |
205 data->deoptimizing_code_list_ = node; | 206 data->deoptimizing_code_list_ = node; |
206 | 207 |
| 208 // We might be in the middle of incremental marking with compaction. |
| 209 // Tell collector to treat this code object in a special way and |
| 210 // ignore all slots that might have been recorded on it. |
| 211 isolate->heap()->mark_compact_collector()->InvalidateCode(code); |
| 212 |
207 // Set the code for the function to non-optimized version. | 213 // Set the code for the function to non-optimized version. |
208 function->ReplaceCode(function->shared()->code()); | 214 function->ReplaceCode(function->shared()->code()); |
209 | 215 |
210 if (FLAG_trace_deopt) { | 216 if (FLAG_trace_deopt) { |
211 PrintF("[forced deoptimization: "); | 217 PrintF("[forced deoptimization: "); |
212 function->PrintName(); | 218 function->PrintName(); |
213 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); | 219 PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function)); |
214 #ifdef DEBUG | 220 #ifdef DEBUG |
215 if (FLAG_print_code) { | 221 if (FLAG_print_code) { |
216 code->PrintLn(); | 222 code->PrintLn(); |
217 } | 223 } |
218 #endif | 224 #endif |
219 } | 225 } |
220 } | 226 } |
221 | 227 |
222 | 228 |
// Patches the stack-check site at pc_after inside unoptimized_code so that
// the guarded call to check_code (the stack-guard stub) becomes an
// unconditional call to replacement_code (the on-stack-replacement entry).
// The conditional branch that skipped the call is overwritten with nops.
// NOTE(review): offsets assume the exact x64 instruction pattern asserted
// below (jae rel8 = 0x73, call rel32 = 0xe8); pc_after points just past the
// call's 32-bit displacement.
void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
                                        Address pc_after,
                                        Code* check_code,
                                        Code* replacement_code) {
  // The call's 32-bit target operand starts kIntSize bytes before pc_after.
  Address call_target_address = pc_after - kIntSize;
  // The site must currently call the stack-check stub.
  ASSERT(check_code->entry() ==
         Assembler::target_address_at(call_target_address));
  // The stack check code matches the pattern:
  //
  //     cmp rsp, <limit>
  //     jae ok
  //     call <stack guard>
  //     test rax, <loop nesting depth>
  // ok: ...
  //
  // We will patch away the branch so the code is:
  //
  //     cmp rsp, <limit>  ;; Not changed
  //     nop
  //     nop
  //     call <on-stack replacement>
  //     test rax, <loop nesting depth>
  // ok:
  //
  ASSERT(*(call_target_address - 3) == 0x73 &&  // jae
         *(call_target_address - 2) == 0x07 &&  // offset
         *(call_target_address - 1) == 0xe8);   // call
  *(call_target_address - 3) = 0x90;  // nop
  *(call_target_address - 2) = 0x90;  // nop
  Assembler::set_target_address_at(call_target_address,
                                   replacement_code->entry());

  // A code target was just written into unoptimized_code; report it to
  // incremental marking so the collector sees the new reference to
  // replacement_code (write-barrier for code patching).
  RelocInfo rinfo(call_target_address,
                  RelocInfo::CODE_TARGET,
                  0,
                  unoptimized_code);
  unoptimized_code->GetHeap()->incremental_marking()->RecordWriteIntoCode(
      unoptimized_code, &rinfo, replacement_code);
}
254 | 268 |
255 | 269 |
// Undoes Deoptimizer::PatchStackCheckCodeAt: restores the conditional
// branch that was nop-ed out and redirects the call back from
// replacement_code (the OSR entry) to check_code (the stack-guard stub).
void Deoptimizer::RevertStackCheckCodeAt(Address pc_after,
                                         Code* check_code,
                                         Code* replacement_code) {
  // The call's 32-bit target operand starts kIntSize bytes before pc_after.
  Address call_target_address = pc_after - kIntSize;
  // The site must currently call the OSR replacement code, i.e. it was
  // previously patched.
  ASSERT(replacement_code->entry() ==
         Assembler::target_address_at(call_target_address));
  // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
  // restore the conditional branch.
  ASSERT(*(call_target_address - 3) == 0x90 &&  // nop
         *(call_target_address - 2) == 0x90 &&  // nop
         *(call_target_address - 1) == 0xe8);   // call
  *(call_target_address - 3) = 0x73;  // jae
  *(call_target_address - 2) = 0x07;  // offset
  Assembler::set_target_address_at(call_target_address,
                                   check_code->entry());
  // Report the patched code target to incremental marking so the collector
  // sees the restored reference to check_code.
  check_code->GetHeap()->incremental_marking()->
      RecordCodeTargetPatch(call_target_address, check_code);
}
272 | 288 |
273 | 289 |
274 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) { | 290 static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) { |
275 ByteArray* translations = data->TranslationByteArray(); | 291 ByteArray* translations = data->TranslationByteArray(); |
276 int length = data->DeoptCount(); | 292 int length = data->DeoptCount(); |
277 for (int i = 0; i < length; i++) { | 293 for (int i = 0; i < length; i++) { |
278 if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) { | 294 if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) { |
279 TranslationIterator it(translations, data->TranslationIndex(i)->value()); | 295 TranslationIterator it(translations, data->TranslationIndex(i)->value()); |
280 int value = it.Next(); | 296 int value = it.Next(); |
(...skipping 425 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
706 __ movq(Operand(rsp, 4 * kPointerSize), arg5); | 722 __ movq(Operand(rsp, 4 * kPointerSize), arg5); |
707 __ LoadAddress(arg5, ExternalReference::isolate_address()); | 723 __ LoadAddress(arg5, ExternalReference::isolate_address()); |
708 __ movq(Operand(rsp, 5 * kPointerSize), arg5); | 724 __ movq(Operand(rsp, 5 * kPointerSize), arg5); |
709 #else | 725 #else |
710 __ movq(r8, arg5); | 726 __ movq(r8, arg5); |
711 __ LoadAddress(r9, ExternalReference::isolate_address()); | 727 __ LoadAddress(r9, ExternalReference::isolate_address()); |
712 #endif | 728 #endif |
713 | 729 |
714 Isolate* isolate = masm()->isolate(); | 730 Isolate* isolate = masm()->isolate(); |
715 | 731 |
716 __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6); | 732 { |
| 733 AllowExternalCallThatCantCauseGC scope(masm()); |
| 734 __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6); |
| 735 } |
717 // Preserve deoptimizer object in register rax and get the input | 736 // Preserve deoptimizer object in register rax and get the input |
718 // frame descriptor pointer. | 737 // frame descriptor pointer. |
719 __ movq(rbx, Operand(rax, Deoptimizer::input_offset())); | 738 __ movq(rbx, Operand(rax, Deoptimizer::input_offset())); |
720 | 739 |
721 // Fill in the input registers. | 740 // Fill in the input registers. |
722 for (int i = kNumberOfRegisters -1; i >= 0; i--) { | 741 for (int i = kNumberOfRegisters -1; i >= 0; i--) { |
723 int offset = (i * kPointerSize) + FrameDescription::registers_offset(); | 742 int offset = (i * kPointerSize) + FrameDescription::registers_offset(); |
724 __ pop(Operand(rbx, offset)); | 743 __ pop(Operand(rbx, offset)); |
725 } | 744 } |
726 | 745 |
(...skipping 25 matching lines...) Expand all Loading... |
752 __ pop(Operand(rdx, 0)); | 771 __ pop(Operand(rdx, 0)); |
753 __ addq(rdx, Immediate(sizeof(intptr_t))); | 772 __ addq(rdx, Immediate(sizeof(intptr_t))); |
754 __ cmpq(rcx, rsp); | 773 __ cmpq(rcx, rsp); |
755 __ j(not_equal, &pop_loop); | 774 __ j(not_equal, &pop_loop); |
756 | 775 |
757 // Compute the output frame in the deoptimizer. | 776 // Compute the output frame in the deoptimizer. |
758 __ push(rax); | 777 __ push(rax); |
759 __ PrepareCallCFunction(2); | 778 __ PrepareCallCFunction(2); |
760 __ movq(arg1, rax); | 779 __ movq(arg1, rax); |
761 __ LoadAddress(arg2, ExternalReference::isolate_address()); | 780 __ LoadAddress(arg2, ExternalReference::isolate_address()); |
762 __ CallCFunction( | 781 { |
763 ExternalReference::compute_output_frames_function(isolate), 2); | 782 AllowExternalCallThatCantCauseGC scope(masm()); |
| 783 __ CallCFunction( |
| 784 ExternalReference::compute_output_frames_function(isolate), 2); |
| 785 } |
764 __ pop(rax); | 786 __ pop(rax); |
765 | 787 |
766 // Replace the current frame with the output frames. | 788 // Replace the current frame with the output frames. |
767 Label outer_push_loop, inner_push_loop; | 789 Label outer_push_loop, inner_push_loop; |
768 // Outer loop state: rax = current FrameDescription**, rdx = one past the | 790 // Outer loop state: rax = current FrameDescription**, rdx = one past the |
769 // last FrameDescription**. | 791 // last FrameDescription**. |
770 __ movl(rdx, Operand(rax, Deoptimizer::output_count_offset())); | 792 __ movl(rdx, Operand(rax, Deoptimizer::output_count_offset())); |
771 __ movq(rax, Operand(rax, Deoptimizer::output_offset())); | 793 __ movq(rax, Operand(rax, Deoptimizer::output_offset())); |
772 __ lea(rdx, Operand(rax, rdx, times_8, 0)); | 794 __ lea(rdx, Operand(rax, rdx, times_8, 0)); |
773 __ bind(&outer_push_loop); | 795 __ bind(&outer_push_loop); |
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
838 } | 860 } |
839 __ bind(&done); | 861 __ bind(&done); |
840 } | 862 } |
841 | 863 |
842 #undef __ | 864 #undef __ |
843 | 865 |
844 | 866 |
845 } } // namespace v8::internal | 867 } } // namespace v8::internal |
846 | 868 |
847 #endif // V8_TARGET_ARCH_X64 | 869 #endif // V8_TARGET_ARCH_X64 |
OLD | NEW |