OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/arm/lithium-codegen-arm.h" | 7 #include "src/arm/lithium-codegen-arm.h" |
8 #include "src/arm/lithium-gap-resolver-arm.h" | 8 #include "src/arm/lithium-gap-resolver-arm.h" |
9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
(...skipping 318 matching lines...)
329 if (deopt_jump_table_.length() > 0) { | 329 if (deopt_jump_table_.length() > 0) { |
330 Label needs_frame, call_deopt_entry; | 330 Label needs_frame, call_deopt_entry; |
331 | 331 |
332 Comment(";;; -------------------- Jump table --------------------"); | 332 Comment(";;; -------------------- Jump table --------------------"); |
333 Address base = deopt_jump_table_[0].address; | 333 Address base = deopt_jump_table_[0].address; |
334 | 334 |
335 Register entry_offset = scratch0(); | 335 Register entry_offset = scratch0(); |
336 | 336 |
337 int length = deopt_jump_table_.length(); | 337 int length = deopt_jump_table_.length(); |
338 for (int i = 0; i < length; i++) { | 338 for (int i = 0; i < length; i++) { |
339 __ bind(&deopt_jump_table_[i].label); | 339 Deoptimizer::JumpTableEntry* table_entry = &deopt_jump_table_[i]; |
| 340 __ bind(&table_entry->label); |
340 | 341 |
341 Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type; | 342 Deoptimizer::BailoutType type = table_entry->bailout_type; |
342 DCHECK(type == deopt_jump_table_[0].bailout_type); | 343 DCHECK(type == deopt_jump_table_[0].bailout_type); |
343 Address entry = deopt_jump_table_[i].address; | 344 Address entry = table_entry->address; |
344 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 345 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
345 DCHECK(id != Deoptimizer::kNotDeoptimizationEntry); | 346 DCHECK(id != Deoptimizer::kNotDeoptimizationEntry); |
346 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 347 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 348 DeoptComment(table_entry->mnemonic, table_entry->reason); |
347 | 349 |
348 // Second-level deopt table entries are contiguous and small, so instead | 350 // Second-level deopt table entries are contiguous and small, so instead |
349 // of loading the full, absolute address of each one, load an immediate | 351 // of loading the full, absolute address of each one, load an immediate |
350 // offset which will be added to the base address later. | 352 // offset which will be added to the base address later. |
351 __ mov(entry_offset, Operand(entry - base)); | 353 __ mov(entry_offset, Operand(entry - base)); |
352 | 354 |
353 if (deopt_jump_table_[i].needs_frame) { | 355 if (table_entry->needs_frame) { |
354 DCHECK(!info()->saves_caller_doubles()); | 356 DCHECK(!info()->saves_caller_doubles()); |
355 if (needs_frame.is_bound()) { | 357 if (needs_frame.is_bound()) { |
356 __ b(&needs_frame); | 358 __ b(&needs_frame); |
357 } else { | 359 } else { |
358 __ bind(&needs_frame); | 360 __ bind(&needs_frame); |
359 Comment(";;; call deopt with frame"); | 361 Comment(";;; call deopt with frame"); |
360 __ PushFixedFrame(); | 362 __ PushFixedFrame(); |
361 // This variant of deopt can only be used with stubs. Since we don't | 363 // This variant of deopt can only be used with stubs. Since we don't |
362 // have a function pointer to install in the stack frame that we're | 364 // have a function pointer to install in the stack frame that we're |
363 // building, install a special marker there instead. | 365 // building, install a special marker there instead. |
(...skipping 476 matching lines...)
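Aside on the relative-offset trick in the jump table above: because the second-level deopt entries sit contiguously in memory, each jump-table slot only needs a small immediate offset from the first entry, and the shared base address is added back once before the call. The standalone C++ sketch below illustrates the idea with made-up addresses and an assumed entry size; it uses none of the real V8 types.

// Minimal sketch (not V8 code) of encoding jump-table targets as offsets
// from a shared base instead of full absolute addresses.
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // Hypothetical layout: deopt entries are contiguous, 8 bytes apart.
  const uintptr_t base = 0x40000000;   // address of entry 0 (assumed)
  const uintptr_t entry_size = 8;

  // Store only the offset from the base -- small enough for an immediate.
  std::vector<uint32_t> offsets;
  for (int id = 0; id < 4; ++id) {
    uintptr_t entry = base + id * entry_size;
    offsets.push_back(static_cast<uint32_t>(entry - base));
  }

  // Later, a single "add base" reconstructs every absolute target.
  for (uint32_t off : offsets) {
    uintptr_t target = base + off;
    std::printf("offset %u -> target %#zx\n", off, static_cast<size_t>(target));
  }
  return 0;
}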
840 int pc_offset = masm()->pc_offset(); | 842 int pc_offset = masm()->pc_offset(); |
841 environment->Register(deoptimization_index, | 843 environment->Register(deoptimization_index, |
842 translation.index(), | 844 translation.index(), |
843 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); | 845 (mode == Safepoint::kLazyDeopt) ? pc_offset : -1); |
844 deoptimizations_.Add(environment, zone()); | 846 deoptimizations_.Add(environment, zone()); |
845 } | 847 } |
846 } | 848 } |
847 | 849 |
848 | 850 |
849 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr, | 851 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr, |
| 852 const char* reason, |
850 Deoptimizer::BailoutType bailout_type) { | 853 Deoptimizer::BailoutType bailout_type) { |
851 LEnvironment* environment = instr->environment(); | 854 LEnvironment* environment = instr->environment(); |
852 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); | 855 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); |
853 DCHECK(environment->HasBeenRegistered()); | 856 DCHECK(environment->HasBeenRegistered()); |
854 int id = environment->deoptimization_index(); | 857 int id = environment->deoptimization_index(); |
855 DCHECK(info()->IsOptimizing() || info()->IsStub()); | 858 DCHECK(info()->IsOptimizing() || info()->IsStub()); |
856 Address entry = | 859 Address entry = |
857 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); | 860 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); |
858 if (entry == NULL) { | 861 if (entry == NULL) { |
859 Abort(kBailoutWasNotPrepared); | 862 Abort(kBailoutWasNotPrepared); |
(...skipping 37 matching lines...)
897 | 900 |
898 if (info()->ShouldTrapOnDeopt()) { | 901 if (info()->ShouldTrapOnDeopt()) { |
899 __ stop("trap_on_deopt", condition); | 902 __ stop("trap_on_deopt", condition); |
900 } | 903 } |
901 | 904 |
902 DCHECK(info()->IsStub() || frame_is_built_); | 905 DCHECK(info()->IsStub() || frame_is_built_); |
903 // Go through jump table if we need to handle condition, build frame, or | 906 // Go through jump table if we need to handle condition, build frame, or |
904 // restore caller doubles. | 907 // restore caller doubles. |
905 if (condition == al && frame_is_built_ && | 908 if (condition == al && frame_is_built_ && |
906 !info()->saves_caller_doubles()) { | 909 !info()->saves_caller_doubles()) { |
| 910 DeoptComment(instr->Mnemonic(), reason); |
907 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | 911 __ Call(entry, RelocInfo::RUNTIME_ENTRY); |
908 } else { | 912 } else { |
909 // We often have several deopts to the same entry, reuse the last | 913 // We often have several deopts to the same entry, reuse the last |
910 // jump entry if this is the case. | 914 // jump entry if this is the case. |
911 if (deopt_jump_table_.is_empty() || | 915 if (deopt_jump_table_.is_empty() || |
912 (deopt_jump_table_.last().address != entry) || | 916 (deopt_jump_table_.last().address != entry) || |
913 (deopt_jump_table_.last().bailout_type != bailout_type) || | 917 (deopt_jump_table_.last().bailout_type != bailout_type) || |
914 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { | 918 (deopt_jump_table_.last().needs_frame != !frame_is_built_)) { |
915 Deoptimizer::JumpTableEntry table_entry(entry, | 919 Deoptimizer::JumpTableEntry table_entry(entry, instr->Mnemonic(), reason, |
916 bailout_type, | 920 bailout_type, !frame_is_built_); |
917 !frame_is_built_); | |
918 deopt_jump_table_.Add(table_entry, zone()); | 921 deopt_jump_table_.Add(table_entry, zone()); |
919 } | 922 } |
920 __ b(condition, &deopt_jump_table_.last().label); | 923 __ b(condition, &deopt_jump_table_.last().label); |
921 } | 924 } |
922 } | 925 } |
923 | 926 |
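Aside on the reuse check in DeoptimizeIf above: consecutive deopts that share the same target address, bailout type, and frame requirement reuse the previous jump-table entry (and its label) rather than growing the table, and with this change each entry also carries the instruction mnemonic and the deopt reason that later feed DeoptComment. The sketch below models that check with plain C++ types; the struct only loosely mirrors Deoptimizer::JumpTableEntry and all names are illustrative.

// Standalone sketch (not V8 types) of "reuse the last jump entry if this
// is the case" from the code above.
#include <cstdint>
#include <cstdio>
#include <string>
#include <vector>

enum class BailoutType { EAGER, LAZY, SOFT };

struct JumpTableEntry {
  uintptr_t address;
  std::string mnemonic;   // instruction name, for the deopt comment
  std::string reason;     // human-readable deopt reason
  BailoutType bailout_type;
  bool needs_frame;
};

// Append an entry only if it cannot share the previous one.
void AddDeopt(std::vector<JumpTableEntry>* table, uintptr_t entry,
              const std::string& mnemonic, const std::string& reason,
              BailoutType type, bool needs_frame) {
  if (table->empty() || table->back().address != entry ||
      table->back().bailout_type != type ||
      table->back().needs_frame != needs_frame) {
    table->push_back({entry, mnemonic, reason, type, needs_frame});
  }
  // The real code would now emit a branch to table->back().label.
}

int main() {
  std::vector<JumpTableEntry> table;
  AddDeopt(&table, 0x1000, "check-smi", "not a Smi", BailoutType::EAGER, true);
  AddDeopt(&table, 0x1000, "check-smi", "not a Smi", BailoutType::EAGER, true);
  AddDeopt(&table, 0x2000, "bounds-check", "out of bounds",
           BailoutType::EAGER, true);
  std::printf("table size: %zu\n", table.size());  // 2: duplicate was shared
  return 0;
}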
924 | 927 |
925 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr) { | 928 void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr, |
| 929 const char* reason) { |
926 Deoptimizer::BailoutType bailout_type = info()->IsStub() | 930 Deoptimizer::BailoutType bailout_type = info()->IsStub() |
927 ? Deoptimizer::LAZY | 931 ? Deoptimizer::LAZY |
928 : Deoptimizer::EAGER; | 932 : Deoptimizer::EAGER; |
929 DeoptimizeIf(condition, instr, bailout_type); | 933 DeoptimizeIf(condition, instr, reason, bailout_type); |
930 } | 934 } |
931 | 935 |
932 | 936 |
933 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { | 937 void LCodeGen::PopulateDeoptimizationData(Handle<Code> code) { |
934 int length = deoptimizations_.length(); | 938 int length = deoptimizations_.length(); |
935 if (length == 0) return; | 939 if (length == 0) return; |
936 Handle<DeoptimizationInputData> data = | 940 Handle<DeoptimizationInputData> data = |
937 DeoptimizationInputData::New(isolate(), length, TENURED); | 941 DeoptimizationInputData::New(isolate(), length, TENURED); |
938 | 942 |
939 Handle<ByteArray> translations = | 943 Handle<ByteArray> translations = |
(...skipping 4718 matching lines...)
5658 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { | 5662 void LCodeGen::DoDeoptimize(LDeoptimize* instr) { |
5659 Deoptimizer::BailoutType type = instr->hydrogen()->type(); | 5663 Deoptimizer::BailoutType type = instr->hydrogen()->type(); |
5660 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the | 5664 // TODO(danno): Stubs expect all deopts to be lazy for historical reasons (the |
5661 // needed return address), even though the implementation of LAZY and EAGER is | 5665 // needed return address), even though the implementation of LAZY and EAGER is |
5662 // now identical. When LAZY is eventually completely folded into EAGER, remove | 5666 // now identical. When LAZY is eventually completely folded into EAGER, remove |
5663 // the special case below. | 5667 // the special case below. |
5664 if (info()->IsStub() && type == Deoptimizer::EAGER) { | 5668 if (info()->IsStub() && type == Deoptimizer::EAGER) { |
5665 type = Deoptimizer::LAZY; | 5669 type = Deoptimizer::LAZY; |
5666 } | 5670 } |
5667 | 5671 |
5668 Comment(";;; deoptimize: %s", instr->hydrogen()->reason()); | 5672 DeoptimizeIf(al, instr, instr->hydrogen()->reason(), type); |
5669 DeoptimizeIf(al, instr, type); | |
5670 } | 5673 } |
5671 | 5674 |
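Aside on the bailout-type special case in DoDeoptimize above: when compiling a stub, an EAGER deopt request is still rewritten to LAZY because stubs expect the return address a lazy deopt provides, even though the two paths are now implemented identically. A minimal sketch of that selection logic, with illustrative names rather than the real V8 API:

// Tiny sketch of the stub special case: EAGER is folded into LAZY for stubs.
#include <cstdio>

enum class BailoutType { EAGER, LAZY };

BailoutType SelectBailoutType(bool is_stub, BailoutType requested) {
  if (is_stub && requested == BailoutType::EAGER) return BailoutType::LAZY;
  return requested;
}

int main() {
  std::printf("stub + EAGER -> %s\n",
              SelectBailoutType(true, BailoutType::EAGER) == BailoutType::LAZY
                  ? "LAZY" : "EAGER");
  std::printf("code + EAGER -> %s\n",
              SelectBailoutType(false, BailoutType::EAGER) == BailoutType::LAZY
                  ? "LAZY" : "EAGER");
  return 0;
}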
5672 | 5675 |
5673 void LCodeGen::DoDummy(LDummy* instr) { | 5676 void LCodeGen::DoDummy(LDummy* instr) { |
5674 // Nothing to see here, move on! | 5677 // Nothing to see here, move on! |
5675 } | 5678 } |
5676 | 5679 |
5677 | 5680 |
5678 void LCodeGen::DoDummyUse(LDummyUse* instr) { | 5681 void LCodeGen::DoDummyUse(LDummyUse* instr) { |
5679 // Nothing to see here, move on! | 5682 // Nothing to see here, move on! |
(...skipping 230 matching lines...)
5910 __ Push(scope_info); | 5913 __ Push(scope_info); |
5911 __ push(ToRegister(instr->function())); | 5914 __ push(ToRegister(instr->function())); |
5912 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 5915 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
5913 RecordSafepoint(Safepoint::kNoLazyDeopt); | 5916 RecordSafepoint(Safepoint::kNoLazyDeopt); |
5914 } | 5917 } |
5915 | 5918 |
5916 | 5919 |
5917 #undef __ | 5920 #undef __ |
5918 | 5921 |
5919 } } // namespace v8::internal | 5922 } } // namespace v8::internal |