OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 263 matching lines...)
274 } | 274 } |
275 | 275 |
276 instr->CompileToNative(this); | 276 instr->CompileToNative(this); |
277 } | 277 } |
278 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); | 278 EnsureSpaceForLazyDeopt(Deoptimizer::patch_size()); |
279 return !is_aborted(); | 279 return !is_aborted(); |
280 } | 280 } |
281 | 281 |
282 | 282 |
283 bool LCodeGen::GenerateJumpTable() { | 283 bool LCodeGen::GenerateJumpTable() { |
284 Label needs_frame_not_call; | 284 Label needs_frame; |
285 Label needs_frame_is_call; | |
286 if (jump_table_.length() > 0) { | 285 if (jump_table_.length() > 0) { |
287 Comment(";;; -------------------- Jump table --------------------"); | 286 Comment(";;; -------------------- Jump table --------------------"); |
288 } | 287 } |
289 for (int i = 0; i < jump_table_.length(); i++) { | 288 for (int i = 0; i < jump_table_.length(); i++) { |
290 __ bind(&jump_table_[i].label); | 289 __ bind(&jump_table_[i].label); |
291 Address entry = jump_table_[i].address; | 290 Address entry = jump_table_[i].address; |
292 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; | 291 Deoptimizer::BailoutType type = jump_table_[i].bailout_type; |
293 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 292 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
294 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 293 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
295 Comment(";;; jump table entry %d.", i); | 294 Comment(";;; jump table entry %d.", i); |
296 } else { | 295 } else { |
297 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 296 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
298 } | 297 } |
299 if (jump_table_[i].needs_frame) { | 298 if (jump_table_[i].needs_frame) { |
300 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); | 299 __ movq(kScratchRegister, ExternalReference::ForDeoptEntry(entry)); |
301 if (type == Deoptimizer::LAZY) { | 300 if (needs_frame.is_bound()) { |
302 if (needs_frame_is_call.is_bound()) { | 301 __ jmp(&needs_frame); |
303 __ jmp(&needs_frame_is_call); | |
304 } else { | |
305 __ bind(&needs_frame_is_call); | |
306 __ push(rbp); | |
307 __ movq(rbp, rsp); | |
308 __ push(rsi); | |
309 // This variant of deopt can only be used with stubs. Since we don't | |
310 // have a function pointer to install in the stack frame that we're | |
311 // building, install a special marker there instead. | |
312 ASSERT(info()->IsStub()); | |
313 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); | |
314 __ push(rsi); | |
315 __ movq(rsi, MemOperand(rsp, kPointerSize)); | |
316 __ call(kScratchRegister); | |
317 } | |
318 } else { | 302 } else { |
319 if (needs_frame_not_call.is_bound()) { | 303 __ bind(&needs_frame); |
320 __ jmp(&needs_frame_not_call); | 304 __ push(rbp); |
321 } else { | 305 __ movq(rbp, rsp); |
322 __ bind(&needs_frame_not_call); | 306 __ push(rsi); |
323 __ push(rbp); | 307 // This variant of deopt can only be used with stubs. Since we don't |
324 __ movq(rbp, rsp); | 308 // have a function pointer to install in the stack frame that we're |
325 __ push(rsi); | 309 // building, install a special marker there instead. |
326 // This variant of deopt can only be used with stubs. Since we don't | 310 ASSERT(info()->IsStub()); |
327 // have a function pointer to install in the stack frame that we're | 311 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); |
328 // building, install a special marker there instead. | 312 __ push(rsi); |
329 ASSERT(info()->IsStub()); | 313 __ movq(rsi, MemOperand(rsp, kPointerSize)); |
330 __ Move(rsi, Smi::FromInt(StackFrame::STUB)); | 314 __ call(kScratchRegister); |
331 __ push(rsi); | |
332 __ movq(rsi, MemOperand(rsp, kPointerSize)); | |
333 __ jmp(kScratchRegister); | |
334 } | |
335 } | 315 } |
336 } else { | 316 } else { |
337 if (type == Deoptimizer::LAZY) { | 317 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
338 __ call(entry, RelocInfo::RUNTIME_ENTRY); | |
339 } else { | |
340 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); | |
341 } | |
342 } | 318 } |
343 } | 319 } |
344 return !is_aborted(); | 320 return !is_aborted(); |
345 } | 321 } |
346 | 322 |
347 | 323 |
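Note on the hunk above: the rewritten GenerateJumpTable collapses the old needs_frame_not_call / needs_frame_is_call pair into a single needs_frame label that is bound lazily. The first jump-table entry that needs a frame emits the frame-building sequence (saved rbp, the context, and a StackFrame::STUB marker where a function pointer would normally go), and every later entry just jumps to the already-bound label. A minimal standalone sketch of that bind-once pattern, with Label and Masm as simplified stand-ins rather than V8's real classes:

struct Label { bool bound = false; };

struct Masm {
  void bind(Label* label) { label->bound = true; }  // record emit position
  void jmp(Label* label) { /* emit a jump to the recorded position */ }
};

// The first caller emits the shared frame-building code; later callers
// reuse it with a plain jump instead of duplicating the sequence.
void EmitNeedsFrame(Masm* masm, Label* needs_frame) {
  if (needs_frame->bound) {
    masm->jmp(needs_frame);   // frame code already emitted once, reuse it
  } else {
    masm->bind(needs_frame);
    // push rbp; movq rbp, rsp; push rsi (context);
    // push Smi::FromInt(StackFrame::STUB);  // marker, no function pointer
    // movq rsi, [rsp + kPointerSize];       // restore context
    // call kScratchRegister;                // enter the deopt entry
  }
}

Because stubs always take the call path now, the old LAZY/non-LAZY split (call vs. jmp to the entry) disappears along with the second label.
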
348 bool LCodeGen::GenerateDeferredCode() { | 324 bool LCodeGen::GenerateDeferredCode() { |
349 ASSERT(is_generating()); | 325 ASSERT(is_generating()); |
350 if (deferred_.length() > 0) { | 326 if (deferred_.length() > 0) { |
351 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { | 327 for (int i = 0; !is_aborted() && i < deferred_.length(); i++) { |
(...skipping 330 matching lines...)
682 if (FLAG_trap_on_deopt && info()->IsOptimizing()) { | 658 if (FLAG_trap_on_deopt && info()->IsOptimizing()) { |
683 Label done; | 659 Label done; |
684 if (cc != no_condition) { | 660 if (cc != no_condition) { |
685 __ j(NegateCondition(cc), &done, Label::kNear); | 661 __ j(NegateCondition(cc), &done, Label::kNear); |
686 } | 662 } |
687 __ int3(); | 663 __ int3(); |
688 __ bind(&done); | 664 __ bind(&done); |
689 } | 665 } |
690 | 666 |
691 ASSERT(info()->IsStub() || frame_is_built_); | 667 ASSERT(info()->IsStub() || frame_is_built_); |
692 bool needs_lazy_deopt = info()->IsStub(); | |
693 if (cc == no_condition && frame_is_built_) { | 668 if (cc == no_condition && frame_is_built_) { |
694 if (needs_lazy_deopt) { | 669 __ call(entry, RelocInfo::RUNTIME_ENTRY); |
695 __ call(entry, RelocInfo::RUNTIME_ENTRY); | |
696 } else { | |
697 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); | |
698 } | |
699 } else { | 670 } else { |
700 // We often have several deopts to the same entry, reuse the last | 671 // We often have several deopts to the same entry, reuse the last |
701 // jump entry if this is the case. | 672 // jump entry if this is the case. |
702 if (jump_table_.is_empty() || | 673 if (jump_table_.is_empty() || |
703 jump_table_.last().address != entry || | 674 jump_table_.last().address != entry || |
704 jump_table_.last().needs_frame != !frame_is_built_ || | 675 jump_table_.last().needs_frame != !frame_is_built_ || |
705 jump_table_.last().bailout_type != bailout_type) { | 676 jump_table_.last().bailout_type != bailout_type) { |
706 Deoptimizer::JumpTableEntry table_entry(entry, | 677 Deoptimizer::JumpTableEntry table_entry(entry, |
707 bailout_type, | 678 bailout_type, |
708 !frame_is_built_); | 679 !frame_is_built_); |
(...skipping 4924 matching lines...)
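The reuse check at the end of the DeoptimizeIf hunk above keeps the jump table from growing when consecutive deopts target the same entry: a new JumpTableEntry is appended only when the address, bailout type, or frame requirement differs from the last one. A hedged standalone sketch with simplified types (the fields mirror the ones compared above; these are not V8's actual declarations):

#include <cstdint>
#include <vector>

struct JumpTableEntry {
  uintptr_t address;
  int bailout_type;   // stands in for Deoptimizer::BailoutType
  bool needs_frame;
};

// True when the last entry already covers this deopt, so DeoptimizeIf can
// branch to the existing entry's label instead of appending a new one.
bool CanReuseLastEntry(const std::vector<JumpTableEntry>& table,
                       uintptr_t entry, int type, bool needs_frame) {
  return !table.empty() &&
         table.back().address == entry &&
         table.back().bailout_type == type &&
         table.back().needs_frame == needs_frame;
}
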
5633 FixedArray::kHeaderSize - kPointerSize)); | 5604 FixedArray::kHeaderSize - kPointerSize)); |
5634 __ bind(&done); | 5605 __ bind(&done); |
5635 } | 5606 } |
5636 | 5607 |
5637 | 5608 |
5638 #undef __ | 5609 #undef __ |
5639 | 5610 |
5640 } } // namespace v8::internal | 5611 } } // namespace v8::internal |
5641 | 5612 |
5642 #endif // V8_TARGET_ARCH_X64 | 5613 #endif // V8_TARGET_ARCH_X64 |