| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 314 matching lines...) |
| 325 deopt_jump_table_.length() * 12)) { | 325 deopt_jump_table_.length() * 12)) { |
| 326 Abort("Generated code is too large"); | 326 Abort("Generated code is too large"); |
| 327 } | 327 } |
| 328 | 328 |
| 329 if (deopt_jump_table_.length() > 0) { | 329 if (deopt_jump_table_.length() > 0) { |
| 330 Comment(";;; -------------------- Jump table --------------------"); | 330 Comment(";;; -------------------- Jump table --------------------"); |
| 331 } | 331 } |
| 332 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 332 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 333 Label table_start; | 333 Label table_start; |
| 334 __ bind(&table_start); | 334 __ bind(&table_start); |
| 335 Label needs_frame_not_call; | 335 Label needs_frame; |
| 336 Label needs_frame_is_call; | |
| 337 for (int i = 0; i < deopt_jump_table_.length(); i++) { | 336 for (int i = 0; i < deopt_jump_table_.length(); i++) { |
| 338 __ bind(&deopt_jump_table_[i].label); | 337 __ bind(&deopt_jump_table_[i].label); |
| 339 Address entry = deopt_jump_table_[i].address; | 338 Address entry = deopt_jump_table_[i].address; |
| 340 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; | 339 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; |
| 341 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 340 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
| 342 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 341 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
| 343 Comment(";;; jump table entry %d.", i); | 342 Comment(";;; jump table entry %d.", i); |
| 344 } else { | 343 } else { |
| 345 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 344 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 346 } | 345 } |
| 347 __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry))); | 346 __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry))); |
| 348 if (deopt_jump_table_[i].needs_frame) { | 347 if (deopt_jump_table_[i].needs_frame) { |
| 349 if (type == Deoptimizer::LAZY) { | 348 if (needs_frame.is_bound()) { |
| 350 if (needs_frame_is_call.is_bound()) { | 349 __ Branch(&needs_frame); |
| 351 __ Branch(&needs_frame_is_call); | |
| 352 } else { | |
| 353 __ bind(&needs_frame_is_call); | |
| 354 __ MultiPush(cp.bit() | fp.bit() | ra.bit()); | |
| 355 // This variant of deopt can only be used with stubs. Since we don't | |
| 356 // have a function pointer to install in the stack frame that we're | |
| 357 // building, install a special marker there instead. | |
| 358 ASSERT(info()->IsStub()); | |
| 359 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | |
| 360 __ push(scratch0()); | |
| 361 __ Addu(fp, sp, Operand(2 * kPointerSize)); | |
| 362 __ Call(t9); | |
| 363 } | |
| 364 } else { | 350 } else { |
| 365 if (needs_frame_not_call.is_bound()) { | 351 __ bind(&needs_frame); |
| 366 __ Branch(&needs_frame_not_call); | 352 __ MultiPush(cp.bit() | fp.bit() | ra.bit()); |
| 367 } else { | 353 // This variant of deopt can only be used with stubs. Since we don't |
| 368 __ bind(&needs_frame_not_call); | 354 // have a function pointer to install in the stack frame that we're |
| 369 __ MultiPush(cp.bit() | fp.bit() | ra.bit()); | 355 // building, install a special marker there instead. |
| 370 // This variant of deopt can only be used with stubs. Since we don't | 356 ASSERT(info()->IsStub()); |
| 371 // have a function pointer to install in the stack frame that we're | 357 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); |
| 372 // building, install a special marker there instead. | 358 __ push(scratch0()); |
| 373 ASSERT(info()->IsStub()); | 359 __ Addu(fp, sp, Operand(2 * kPointerSize)); |
| 374 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | 360 __ Call(t9); |
| 375 __ push(scratch0()); | |
| 376 __ Addu(fp, sp, Operand(2 * kPointerSize)); | |
| 377 __ Jump(t9); | |
| 378 } | |
| 379 } | 361 } |
| 380 } else { | 362 } else { |
| 381 if (type == Deoptimizer::LAZY) { | 363 __ Call(t9); |
| 382 __ Call(t9); | |
| 383 } else { | |
| 384 __ Jump(t9); | |
| 385 } | |
| 386 } | 364 } |
| 387 } | 365 } |
| 388 __ RecordComment("]"); | 366 __ RecordComment("]"); |
| 389 | 367 |
| 390 // The deoptimization jump table is the last part of the instruction | 368 // The deoptimization jump table is the last part of the instruction |
| 391 // sequence. Mark the generated code as done unless we bailed out. | 369 // sequence. Mark the generated code as done unless we bailed out. |
| 392 if (!is_aborted()) status_ = DONE; | 370 if (!is_aborted()) status_ = DONE; |
| 393 return !is_aborted(); | 371 return !is_aborted(); |
| 394 } | 372 } |
| 395 | 373 |
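For readers of the hunk above: the old code kept two frame-building stubs, one per bailout type, because eager entries ended in `Jump(t9)` while lazy ones ended in `Call(t9)`. Once every jump-table entry reaches its deopt target via `Call(t9)`, a single `needs_frame` label suffices. Below is a minimal, self-contained sketch (assumed, simplified types; not V8's real `MacroAssembler` API) of the bind-once / branch-later pattern: the first entry that needs a frame emits the frame-building sequence inline and binds the shared label, and every later such entry just branches back to it.

```cpp
#include <cstdio>

struct Label {
  int pos = -1;                              // instruction offset once bound
  bool is_bound() const { return pos >= 0; }
};

struct Assembler {
  int pc = 0;
  void bind(Label* l) { l->pos = pc; }
  void Branch(const Label& l) { std::printf("%3d: b %d\n", pc++, l.pos); }
  void Emit(const char* op) { std::printf("%3d: %s\n", pc++, op); }
};

int main() {
  Assembler masm;
  Label needs_frame;  // shared frame-building sequence, emitted at most once
  const bool entry_needs_frame[] = {true, true, false, true};

  for (bool needs : entry_needs_frame) {
    masm.Emit("li t9, <deopt entry>");       // per-entry target load
    if (needs) {
      if (needs_frame.is_bound()) {
        masm.Branch(needs_frame);            // reuse the shared sequence
      } else {
        masm.bind(&needs_frame);             // first user: emit it inline
        masm.Emit("MultiPush cp|fp|ra");
        masm.Emit("push Smi(StackFrame::STUB)");
        masm.Emit("Addu fp, sp, 2*kPointerSize");
        masm.Emit("Call t9");                // enters the deoptimizer
      }
    } else {
      masm.Emit("Call t9");                  // frame already built
    }
  }
  return 0;
}
```

Falling through from the bound sequence into the next entry is safe here because `Call t9` enters the deoptimizer and never returns to the table.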
| (...skipping 377 matching lines...) |
| 773 if (FLAG_trap_on_deopt && info()->IsOptimizing()) { | 751 if (FLAG_trap_on_deopt && info()->IsOptimizing()) { |
| 774 Label skip; | 752 Label skip; |
| 775 if (cc != al) { | 753 if (cc != al) { |
| 776 __ Branch(&skip, NegateCondition(cc), src1, src2); | 754 __ Branch(&skip, NegateCondition(cc), src1, src2); |
| 777 } | 755 } |
| 778 __ stop("trap_on_deopt"); | 756 __ stop("trap_on_deopt"); |
| 779 __ bind(&skip); | 757 __ bind(&skip); |
| 780 } | 758 } |
| 781 | 759 |
| 782 ASSERT(info()->IsStub() || frame_is_built_); | 760 ASSERT(info()->IsStub() || frame_is_built_); |
| 783 bool needs_lazy_deopt = info()->IsStub(); | |
| 784 if (cc == al && frame_is_built_) { | 761 if (cc == al && frame_is_built_) { |
| 785 if (needs_lazy_deopt) { | 762 __ Call(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); |
| 786 __ Call(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); | |
| 787 } else { | |
| 788 __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); | |
| 789 } | |
| 790 } else { | 763 } else { |
| 791 // We often have several deopts to the same entry, reuse the last | 764 // We often have several deopts to the same entry, reuse the last |
| 792 // jump entry if this is the case. | 765 // jump entry if this is the case. |
| 793 if (deopt_jump_table_.is_empty() || | 766 if (deopt_jump_table_.is_empty() || |
| 794 (deopt_jump_table_.last().address != entry) || | 767 (deopt_jump_table_.last().address != entry) || |
| 795 (deopt_jump_table_.last().bailout_type != bailout_type) || | 768 (deopt_jump_table_.last().bailout_type != bailout_type) || |
| 796 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 769 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { |
| 797 Deoptimizer::JumpTableEntry table_entry(entry, | 770 Deoptimizer::JumpTableEntry table_entry(entry, |
| 798 bailout_type, | 771 bailout_type, |
| 799 !frame_is_built_); | 772 !frame_is_built_); |
| (...skipping 5094 matching lines...) |
| 5894 __ Subu(scratch, result, scratch); | 5867 __ Subu(scratch, result, scratch); |
| 5895 __ lw(result, FieldMemOperand(scratch, | 5868 __ lw(result, FieldMemOperand(scratch, |
| 5896 FixedArray::kHeaderSize - kPointerSize)); | 5869 FixedArray::kHeaderSize - kPointerSize)); |
| 5897 __ bind(&done); | 5870 __ bind(&done); |
| 5898 } | 5871 } |
| 5899 | 5872 |
| 5900 | 5873 |
| 5901 #undef __ | 5874 #undef __ |
| 5902 | 5875 |
| 5903 } } // namespace v8::internal | 5876 } } // namespace v8::internal |
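One more note on the `DeoptimizeIf` hunk: with both bailout types now reaching the deopt entry via `Call`, the `needs_lazy_deopt` Jump/Call split disappears, and what remains is the entry-reuse check visible in the diff. A small sketch of that dedup follows (assumed, simplified types, and a hypothetical `EntryFor` helper; the real code appends a `Deoptimizer::JumpTableEntry` and branches to its label): a new slot is added only when the deopt address, bailout type, or frame requirement differs from the most recent entry, so a run of consecutive deopts to the same target shares one table slot.

```cpp
#include <cassert>
#include <vector>

using Address = const void*;
enum class BailoutType { EAGER, LAZY };

struct JumpTableEntry {
  Address address;
  BailoutType bailout_type;
  bool needs_frame;
};

// Returns the entry the current deopt branch should target, appending a
// new one only when the last entry does not match (hypothetical helper).
const JumpTableEntry& EntryFor(std::vector<JumpTableEntry>& table,
                               Address address, BailoutType type,
                               bool needs_frame) {
  if (table.empty() ||
      table.back().address != address ||
      table.back().bailout_type != type ||
      table.back().needs_frame != needs_frame) {
    table.push_back({address, type, needs_frame});
  }
  return table.back();
}

int main() {
  std::vector<JumpTableEntry> table;
  int x = 0, y = 0;
  EntryFor(table, &x, BailoutType::EAGER, true);
  EntryFor(table, &x, BailoutType::EAGER, true);  // matches: slot reused
  EntryFor(table, &y, BailoutType::LAZY, true);   // differs: new slot
  assert(table.size() == 2);
  return 0;
}
```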