OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 754 matching lines...) |
765 } | 765 } |
766 | 766 |
767 | 767 |
768 void LCodeGen::DeoptimizeIf(Condition cc, | 768 void LCodeGen::DeoptimizeIf(Condition cc, |
769 LEnvironment* environment, | 769 LEnvironment* environment, |
770 Register src1, | 770 Register src1, |
771 const Operand& src2) { | 771 const Operand& src2) { |
772 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 772 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
773 ASSERT(environment->HasBeenRegistered()); | 773 ASSERT(environment->HasBeenRegistered()); |
774 int id = environment->deoptimization_index(); | 774 int id = environment->deoptimization_index(); |
775 | 775 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
776 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 776 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
777 ? Deoptimizer::LAZY | 777 ? Deoptimizer::LAZY |
778 : Deoptimizer::EAGER; | 778 : Deoptimizer::EAGER; |
779 Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type); | 779 Address entry = Deoptimizer::GetDeoptimizationEntry(id, bailout_type); |
780 if (entry == NULL) { | 780 if (entry == NULL) { |
781 Abort("bailout was not prepared"); | 781 Abort("bailout was not prepared"); |
782 return; | 782 return; |
783 } | 783 } |
784 | 784 |
785 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. | 785 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. |
786 | |
787 if (FLAG_deopt_every_n_times == 1 && info_->opt_count() == id) { | 786 if (FLAG_deopt_every_n_times == 1 && info_->opt_count() == id) { |
788 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); | 787 __ Jump(entry, RelocInfo::RUNTIME_ENTRY); |
789 return; | 788 return; |
790 } | 789 } |
791 | 790 |
792 if (FLAG_trap_on_deopt) { | 791 if (FLAG_trap_on_deopt) { |
793 Label skip; | 792 Label skip; |
794 if (cc != al) { | 793 if (cc != al) { |
795 __ Branch(&skip, NegateCondition(cc), src1, src2); | 794 __ Branch(&skip, NegateCondition(cc), src1, src2); |
796 } | 795 } |
797 __ stop("trap_on_deopt"); | 796 __ stop("trap_on_deopt"); |
798 __ bind(&skip); | 797 __ bind(&skip); |
799 } | 798 } |
800 | 799 |
| 800 ASSERT(info()->IsStub() || frame_is_built_); |
801 bool needs_lazy_deopt = info()->IsStub(); | 801 bool needs_lazy_deopt = info()->IsStub(); |
802 ASSERT(info()->IsStub() || frame_is_built_); | 802 if (cc == al && frame_is_built_) { |
803 if (cc == al && !needs_lazy_deopt) { | 803 if (needs_lazy_deopt) { |
804 __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); | 804 __ Call(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); |
| 805 } else { |
| 806 __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); |
| 807 } |
805 } else { | 808 } else { |
806 // We often have several deopts to the same entry, reuse the last | 809 // We often have several deopts to the same entry, reuse the last |
807 // jump entry if this is the case. | 810 // jump entry if this is the case. |
808 if (deopt_jump_table_.is_empty() || | 811 if (deopt_jump_table_.is_empty() || |
809 (deopt_jump_table_.last().address != entry) || | 812 (deopt_jump_table_.last().address != entry) || |
810 (deopt_jump_table_.last().is_lazy_deopt != needs_lazy_deopt) || | 813 (deopt_jump_table_.last().is_lazy_deopt != needs_lazy_deopt) || |
811 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 814 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { |
812 JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt); | 815 JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt); |
813 deopt_jump_table_.Add(table_entry, zone()); | 816 deopt_jump_table_.Add(table_entry, zone()); |
814 } | 817 } |
(...skipping 4995 matching lines...) |
5810 __ Subu(scratch, result, scratch); | 5813 __ Subu(scratch, result, scratch); |
5811 __ lw(result, FieldMemOperand(scratch, | 5814 __ lw(result, FieldMemOperand(scratch, |
5812 FixedArray::kHeaderSize - kPointerSize)); | 5815 FixedArray::kHeaderSize - kPointerSize)); |
5813 __ bind(&done); | 5816 __ bind(&done); |
5814 } | 5817 } |
5815 | 5818 |
5816 | 5819 |
5817 #undef __ | 5820 #undef __ |
5818 | 5821 |
5819 } } // namespace v8::internal | 5822 } } // namespace v8::internal |