| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 331 matching lines...) |
| 342 Comment(";;; -------------------- Jump table --------------------"); | 342 Comment(";;; -------------------- Jump table --------------------"); |
| 343 } | 343 } |
| 344 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | 344 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); |
| 345 Label table_start; | 345 Label table_start; |
| 346 __ bind(&table_start); | 346 __ bind(&table_start); |
| 347 Label needs_frame_not_call; | 347 Label needs_frame_not_call; |
| 348 Label needs_frame_is_call; | 348 Label needs_frame_is_call; |
| 349 for (int i = 0; i < deopt_jump_table_.length(); i++) { | 349 for (int i = 0; i < deopt_jump_table_.length(); i++) { |
| 350 __ bind(&deopt_jump_table_[i].label); | 350 __ bind(&deopt_jump_table_[i].label); |
| 351 Address entry = deopt_jump_table_[i].address; | 351 Address entry = deopt_jump_table_[i].address; |
| 352 bool is_lazy_deopt = deopt_jump_table_[i].is_lazy_deopt; | 352 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; |
| 353 Deoptimizer::BailoutType type = | |
| 354 is_lazy_deopt ? Deoptimizer::LAZY : Deoptimizer::EAGER; | |
| 355 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 353 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
| 356 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 354 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
| 357 Comment(";;; jump table entry %d.", i); | 355 Comment(";;; jump table entry %d.", i); |
| 358 } else { | 356 } else { |
| 359 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 357 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 360 } | 358 } |
| 361 __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry))); | 359 __ li(t9, Operand(ExternalReference::ForDeoptEntry(entry))); |
| 362 if (deopt_jump_table_[i].needs_frame) { | 360 if (deopt_jump_table_[i].needs_frame) { |
| 363 if (is_lazy_deopt) { | 361 if (type == Deoptimizer::LAZY) { |
| 364 if (needs_frame_is_call.is_bound()) { | 362 if (needs_frame_is_call.is_bound()) { |
| 365 __ Branch(&needs_frame_is_call); | 363 __ Branch(&needs_frame_is_call); |
| 366 } else { | 364 } else { |
| 367 __ bind(&needs_frame_is_call); | 365 __ bind(&needs_frame_is_call); |
| 368 __ MultiPush(cp.bit() | fp.bit() | ra.bit()); | 366 __ MultiPush(cp.bit() | fp.bit() | ra.bit()); |
| 369 // This variant of deopt can only be used with stubs. Since we don't | 367 // This variant of deopt can only be used with stubs. Since we don't |
| 370 // have a function pointer to install in the stack frame that we're | 368 // have a function pointer to install in the stack frame that we're |
| 371 // building, install a special marker there instead. | 369 // building, install a special marker there instead. |
| 372 ASSERT(info()->IsStub()); | 370 ASSERT(info()->IsStub()); |
| 373 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | 371 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); |
| (...skipping 11 matching lines...) |
| 385 // have a function pointer to install in the stack frame that we're | 383 // have a function pointer to install in the stack frame that we're |
| 386 // building, install a special marker there instead. | 384 // building, install a special marker there instead. |
| 387 ASSERT(info()->IsStub()); | 385 ASSERT(info()->IsStub()); |
| 388 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); | 386 __ li(scratch0(), Operand(Smi::FromInt(StackFrame::STUB))); |
| 389 __ push(scratch0()); | 387 __ push(scratch0()); |
| 390 __ Addu(fp, sp, Operand(2 * kPointerSize)); | 388 __ Addu(fp, sp, Operand(2 * kPointerSize)); |
| 391 __ Jump(t9); | 389 __ Jump(t9); |
| 392 } | 390 } |
| 393 } | 391 } |
| 394 } else { | 392 } else { |
| 395 if (is_lazy_deopt) { | 393 if (type == Deoptimizer::LAZY) { |
| 396 __ Call(t9); | 394 __ Call(t9); |
| 397 } else { | 395 } else { |
| 398 __ Jump(t9); | 396 __ Jump(t9); |
| 399 } | 397 } |
| 400 } | 398 } |
| 401 } | 399 } |
| 402 __ RecordComment("]"); | 400 __ RecordComment("]"); |
| 403 | 401 |
| 404 // The deoptimization jump table is the last part of the instruction | 402 // The deoptimization jump table is the last part of the instruction |
| 405 // sequence. Mark the generated code as done unless we bailed out. | 403 // sequence. Mark the generated code as done unless we bailed out. |
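Note on the hunk above: the patch replaces the boolean `is_lazy_deopt` stored in each jump-table entry with the full `Deoptimizer::BailoutType` enum, so the table can represent more than the eager/lazy pair (this series introduces a SOFT bailout, used further down). A minimal sketch of the entry shape this implies, inferred from how this file constructs and reads `deopt_jump_table_`; the real declaration lives in deoptimizer.h and its exact layout is an assumption here.

    // Sketch only: members inferred from the .label, .address,
    // .bailout_type and .needs_frame uses in the hunk above.
    struct JumpTableEntry {
      JumpTableEntry(Address entry_address,
                     Deoptimizer::BailoutType type,
                     bool frame)
          : label(),
            address(entry_address),
            bailout_type(type),
            needs_frame(frame) { }
      Label label;                            // bound at the table slot
      Address address;                        // deopt entry to branch to
      Deoptimizer::BailoutType bailout_type;  // EAGER, LAZY, or SOFT
      bool needs_frame;                       // build a stub frame first?
    };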
| (...skipping 391 matching lines...) |
| 797 environment->Register(deoptimization_index, | 795 environment->Register(deoptimization_index, |
| 798 translation.index(), | 796 translation.index(), |
| 799 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 797 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
| 800 deoptimizations_.Add(environment, zone()); | 798 deoptimizations_.Add(environment, zone()); |
| 801 } | 799 } |
| 802 } | 800 } |
| 803 | 801 |
| 804 | 802 |
| 805 void LCodeGen::DeoptimizeIf(Condition cc, | 803 void LCodeGen::DeoptimizeIf(Condition cc, |
| 806 LEnvironment* environment, | 804 LEnvironment* environment, |
| | 805 Deoptimizer::BailoutType bailout_type, |
| 807 Register src1, | 806 Register src1, |
| 808 const Operand& src2) { | 807 const Operand& src2) { |
| 809 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 808 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
| 810 ASSERT(environment->HasBeenRegistered()); | 809 ASSERT(environment->HasBeenRegistered()); |
| 811 int id = environment->deoptimization_index(); | 810 int id = environment->deoptimization_index(); |
| 812 ASSERT(info()->IsOptimizing() || info()->IsStub()); | 811 ASSERT(info()->IsOptimizing() || info()->IsStub()); |
| 813 Deoptimizer::BailoutType bailout_type = info()->IsStub() | |
| 814 ? Deoptimizer::LAZY | |
| 815 : Deoptimizer::EAGER; | |
| 816 Address entry = | 812 Address entry = |
| 817 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 813 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
| 818 if (entry == NULL) { | 814 if (entry == NULL) { |
| 819 Abort("bailout was not prepared"); | 815 Abort("bailout was not prepared"); |
| 820 return; | 816 return; |
| 821 } | 817 } |
| 822 | 818 |
| 823 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. | 819 ASSERT(FLAG_deopt_every_n_times < 2); // Other values not supported on MIPS. |
| 824 if (FLAG_deopt_every_n_times == 1 && | 820 if (FLAG_deopt_every_n_times == 1 && |
| 825 !info()->IsStub() && | 821 !info()->IsStub() && |
| (...skipping 17 matching lines...) |
| 843 if (needs_lazy_deopt) { | 839 if (needs_lazy_deopt) { |
| 844 __ Call(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); | 840 __ Call(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); |
| 845 } else { | 841 } else { |
| 846 __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); | 842 __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc, src1, src2); |
| 847 } | 843 } |
| 848 } else { | 844 } else { |
| 849 // We often have several deopts to the same entry, reuse the last | 845 // We often have several deopts to the same entry, reuse the last |
| 850 // jump entry if this is the case. | 846 // jump entry if this is the case. |
| 851 if (deopt_jump_table_.is_empty() || | 847 if (deopt_jump_table_.is_empty() || |
| 852 (deopt_jump_table_.last().address != entry) || | 848 (deopt_jump_table_.last().address != entry) || |
| 853 (deopt_jump_table_.last().is_lazy_deopt != needs_lazy_deopt) || | 849 (deopt_jump_table_.last().bailout_type != bailout_type) || |
| 854 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 850 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { |
| 855 JumpTableEntry table_entry(entry, !frame_is_built_, needs_lazy_deopt); | 851 Deoptimizer::JumpTableEntry table_entry(entry, |
| | 852 bailout_type, |
| | 853 !frame_is_built_); |
| 856 deopt_jump_table_.Add(table_entry, zone()); | 854 deopt_jump_table_.Add(table_entry, zone()); |
| 857 } | 855 } |
| 858 __ Branch(&deopt_jump_table_.last().label, cc, src1, src2); | 856 __ Branch(&deopt_jump_table_.last().label, cc, src1, src2); |
| 859 } | 857 } |
| 860 } | 858 } |
| 861 | 859 |
| 862 | 860 |
| | 861 void LCodeGen::DeoptimizeIf(Condition cc, |
| | 862 LEnvironment* environment, |
| | 863 Register src1, |
| | 864 const Operand& src2) { |
| | 865 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
| | 866 ? Deoptimizer::LAZY |
| | 867 : Deoptimizer::EAGER; |
| | 868 DeoptimizeIf(cc, environment, bailout_type, src1, src2); |
| | 869 } |
| | 870 |
| | 871 |
| | 872 void LCodeGen::SoftDeoptimize(LEnvironment* environment, |
| | 873 Register src1, |
| | 874 const Operand& src2) { |
| | 875 ASSERT(!info()->IsStub()); |
| | 876 DeoptimizeIf(al, environment, Deoptimizer::SOFT, src1, src2); |
| | 877 } |
| | 878 |
| | 879 |
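The bailout type is now an explicit parameter of `DeoptimizeIf`, with a thin overload preserving the old default (stubs take a LAZY bailout, regular optimized code an EAGER one) and a new `SoftDeoptimize` that forces an unconditional SOFT bailout. The definitions are in the hunk above; the matching declarations in lithium-codegen-mips.h are an assumption here, sketched as:

    // Assumed header declarations, mirroring the definitions above.
    void DeoptimizeIf(Condition cc, LEnvironment* environment,
                      Deoptimizer::BailoutType bailout_type,
                      Register src1, const Operand& src2);
    // Thin wrapper: LAZY for stubs, EAGER for regular optimized code.
    void DeoptimizeIf(Condition cc, LEnvironment* environment,
                      Register src1, const Operand& src2);
    // Unconditional SOFT bailout; asserts the caller is not a stub.
    void SoftDeoptimize(LEnvironment* environment,
                        Register src1, const Operand& src2);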
| 863 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { | 880 void LCodeGen::RegisterDependentCodeForEmbeddedMaps(Handle<Code> code) { |
| 864 ZoneList<Handle<Map> > maps(1, zone()); | 881 ZoneList<Handle<Map> > maps(1, zone()); |
| 865 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); | 882 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT); |
| 866 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { | 883 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) { |
| 867 RelocInfo::Mode mode = it.rinfo()->rmode(); | 884 RelocInfo::Mode mode = it.rinfo()->rmode(); |
| 868 if (mode == RelocInfo::EMBEDDED_OBJECT && | 885 if (mode == RelocInfo::EMBEDDED_OBJECT && |
| 869 it.rinfo()->target_object()->IsMap()) { | 886 it.rinfo()->target_object()->IsMap()) { |
| 870 Handle<Map> map(Map::cast(it.rinfo()->target_object())); | 887 Handle<Map> map(Map::cast(it.rinfo()->target_object())); |
| 871 if (map->CanTransition()) { | 888 if (map->CanTransition()) { |
| 872 maps.Add(map, zone()); | 889 maps.Add(map, zone()); |
| (...skipping 4590 matching lines...) |
| 5463 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { | 5480 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { |
| 5464 EnsureSpaceForLazyDeopt(); | 5481 EnsureSpaceForLazyDeopt(); |
| 5465 ASSERT(instr->HasEnvironment()); | 5482 ASSERT(instr->HasEnvironment()); |
| 5466 LEnvironment* env = instr->environment(); | 5483 LEnvironment* env = instr->environment(); |
| 5467 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); | 5484 RegisterEnvironmentForDeoptimization(env, Safepoint::kLazyDeopt); |
| 5468 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | 5485 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
| 5469 } | 5486 } |
| 5470 | 5487 |
| 5471 | 5488 |
| 5472 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5489 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
| 5473 DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg)); | 5490 if (instr->hydrogen_value()->IsSoftDeoptimize()) { |
| | 5491 SoftDeoptimize(instr->environment(), zero_reg, Operand(zero_reg)); |
| | 5492 } else { |
| | 5493 DeoptimizeIf(al, instr->environment(), zero_reg, Operand(zero_reg)); |
| | 5494 } |
| 5474 } | 5495 } |
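DoDeoptimize now dispatches on the hydrogen instruction: a soft deoptimize goes through `SoftDeoptimize`, everything else through the default overload. Both calls are unconditional, since `al` is the always-taken condition and the `zero_reg`/`Operand(zero_reg)` pair merely fills the compare-operand slots that the MIPS branch helpers require in place of condition flags. For contrast, a hypothetical guarded use, with `value` and `instr` standing in for whatever the surrounding lithium instruction provides:

    // Hypothetical guard: bail out of optimized code if value is not
    // a Smi (a Smi has a clear low tag bit on MIPS).
    __ And(scratch0(), value, Operand(kSmiTagMask));
    DeoptimizeIf(ne, instr->environment(), scratch0(), Operand(zero_reg));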
| 5475 | 5496 |
| 5476 | 5497 |
| 5477 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5498 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
| 5478 // Nothing to see here, move on! | 5499 // Nothing to see here, move on! |
| 5479 } | 5500 } |
| 5480 | 5501 |
| 5481 | 5502 |
| 5482 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { | 5503 void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) { |
| 5483 Register object = ToRegister(instr->object()); | 5504 Register object = ToRegister(instr->object()); |
| (...skipping 182 matching lines...) |
| 5666 __ Subu(scratch, result, scratch); | 5687 __ Subu(scratch, result, scratch); |
| 5667 __ lw(result, FieldMemOperand(scratch, | 5688 __ lw(result, FieldMemOperand(scratch, |
| 5668 FixedArray::kHeaderSize - kPointerSize)); | 5689 FixedArray::kHeaderSize - kPointerSize)); |
| 5669 __ bind(&done); | 5690 __ bind(&done); |
| 5670 } | 5691 } |
| 5671 | 5692 |
| 5672 | 5693 |
| 5673 #undef __ | 5694 #undef __ |
| 5674 | 5695 |
| 5675 } } // namespace v8::internal | 5696 } } // namespace v8::internal |