OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "v8.h" | 5 #include "v8.h" |
6 | 6 |
7 #include "arm64/lithium-codegen-arm64.h" | 7 #include "arm64/lithium-codegen-arm64.h" |
8 #include "arm64/lithium-gap-resolver-arm64.h" | 8 #include "arm64/lithium-gap-resolver-arm64.h" |
9 #include "code-stubs.h" | 9 #include "code-stubs.h" |
10 #include "stub-cache.h" | 10 #include "stub-cache.h" |
(...skipping 814 matching lines...)
825 // sure that no constant pools are emitted after deferred code because | 825 // sure that no constant pools are emitted after deferred code because |
826 // deferred code generation is the last step which generates code. The two | 826 // deferred code generation is the last step which generates code. The two |
827 // following steps will only output data used by Crankshaft. | 827 // following steps will only output data used by Crankshaft. |
828 masm()->CheckConstPool(true, false); | 828 masm()->CheckConstPool(true, false); |
829 | 829 |
830 return !is_aborted(); | 830 return !is_aborted(); |
831 } | 831 } |
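Note (editorial, illustrative only): the CheckConstPool() calls in this file appear to follow the ARM-style convention of passing (force_emit, require_jump). Opportunistic checks pass false for the first argument while code is still being emitted; a forced check passes true once the current step will emit no more code, so that no constant pool can land after it. The stand-alone C++ sketch below models only the force/distance part of that policy; it ignores the require_jump argument, and every name in it (PendingPool, kMaxDistance, ...) is made up for illustration and is not V8's implementation.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Hypothetical model of a pending literal (constant) pool.
    class PendingPool {
     public:
      // Record a 64-bit literal referenced by an instruction at pc_offset.
      void Add(uint64_t literal, int pc_offset) {
        if (pending_.empty()) first_use_offset_ = pc_offset;
        pending_.push_back(literal);
      }

      // force_emit: emit now regardless of distance (used once nothing else
      // will generate code).  Otherwise emit only when the oldest pending
      // literal is about to go out of the PC-relative range of the load that
      // references it.
      void Check(bool force_emit, int pc_offset) {
        if (pending_.empty()) return;
        if (force_emit || (pc_offset - first_use_offset_) > kMaxDistance) {
          std::printf("emitting pool with %zu literals\n", pending_.size());
          pending_.clear();
        }
      }

     private:
      static const int kMaxDistance = 1 << 20;  // illustrative range budget
      std::vector<uint64_t> pending_;
      int first_use_offset_ = 0;
    };

    int main() {
      PendingPool pool;
      pool.Add(0xdeadbeefcafef00dULL, /*pc_offset=*/0);
      pool.Check(false, /*pc_offset=*/64);   // nothing emitted: still in range
      pool.Check(true, /*pc_offset=*/128);   // forced: pool emitted here
      return 0;
    }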
832 | 832 |
833 | 833 |
834 bool LCodeGen::GenerateDeoptJumpTable() { | 834 bool LCodeGen::GenerateDeoptJumpTable() { |
| 835 Label needs_frame, restore_caller_doubles, call_deopt_entry; |
| 836 |
835 if (deopt_jump_table_.length() > 0) { | 837 if (deopt_jump_table_.length() > 0) { |
836 Comment(";;; -------------------- Jump table --------------------"); | 838 Comment(";;; -------------------- Jump table --------------------"); |
837 } | 839 Address base = deopt_jump_table_[0]->address; |
838 Label table_start; | 840 |
839 __ bind(&table_start); | 841 UseScratchRegisterScope temps(masm()); |
840 Label needs_frame; | 842 Register entry_offset = temps.AcquireX(); |
841 for (int i = 0; i < deopt_jump_table_.length(); i++) { | 843 |
842 __ Bind(&deopt_jump_table_[i]->label); | 844 int length = deopt_jump_table_.length(); |
843 Address entry = deopt_jump_table_[i]->address; | 845 for (int i = 0; i < length; i++) { |
844 Deoptimizer::BailoutType type = deopt_jump_table_[i]->bailout_type; | 846 __ Bind(&deopt_jump_table_[i]->label); |
845 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); | 847 |
846 if (id == Deoptimizer::kNotDeoptimizationEntry) { | 848 Deoptimizer::BailoutType type = deopt_jump_table_[i]->bailout_type; |
847 Comment(";;; jump table entry %d.", i); | 849 Address entry = deopt_jump_table_[i]->address; |
848 } else { | 850 int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type); |
849 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); | 851 if (id == Deoptimizer::kNotDeoptimizationEntry) { |
| 852 Comment(";;; jump table entry %d.", i); |
| 853 } else { |
| 854 Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id); |
| 855 } |
| 856 |
| 857 // Second-level deopt table entries are contiguous and small, so instead |
| 858 // of loading the full, absolute address of each one, load the base |
| 859 // address and add an immediate offset. |
| 860 __ Mov(entry_offset, entry - base); |
| 861 |
| 862 // The last entry can fall through into `call_deopt_entry`, avoiding a |
| 863 // branch. |
| 864 bool last_entry = (i + 1) == length; |
| 865 |
| 866 if (deopt_jump_table_[i]->needs_frame) { |
| 867 ASSERT(!info()->saves_caller_doubles()); |
| 868 if (!needs_frame.is_bound()) { |
| 869 // This variant of deopt can only be used with stubs. Since we don't |
| 870 // have a function pointer to install in the stack frame that we're |
| 871 // building, install a special marker there instead. |
| 872 ASSERT(info()->IsStub()); |
| 873 |
| 874 UseScratchRegisterScope temps(masm()); |
| 875 Register stub_marker = temps.AcquireX(); |
| 876 __ Bind(&needs_frame); |
| 877 __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB)); |
| 878 __ Push(lr, fp, cp, stub_marker); |
| 879 __ Add(fp, __ StackPointer(), 2 * kPointerSize); |
| 880 if (!last_entry) __ B(&call_deopt_entry); |
| 881 } else { |
| 882 // Reuse the existing needs_frame code. |
| 883 __ B(&needs_frame); |
| 884 } |
| 885 } else if (info()->saves_caller_doubles()) { |
| 886 ASSERT(info()->IsStub()); |
| 887 if (!restore_caller_doubles.is_bound()) { |
| 888 __ Bind(&restore_caller_doubles); |
| 889 RestoreCallerDoubles(); |
| 890 if (!last_entry) __ B(&call_deopt_entry); |
| 891 } else { |
| 892 // Reuse the existing restore_caller_doubles code. |
| 893 __ B(&restore_caller_doubles); |
| 894 } |
| 895 } else { |
| 896 // There is nothing special to do, so just continue to the second-level |
| 897 // table. |
| 898 if (!last_entry) __ B(&call_deopt_entry); |
| 899 } |
| 900 |
| 901 masm()->CheckConstPool(false, last_entry); |
850 } | 902 } |
851 if (deopt_jump_table_[i]->needs_frame) { | |
852 ASSERT(!info()->saves_caller_doubles()); | |
853 | 903 |
854 UseScratchRegisterScope temps(masm()); | 904 // Generate common code for calling the second-level deopt table. |
855 Register stub_deopt_entry = temps.AcquireX(); | 905 Register deopt_entry = temps.AcquireX(); |
856 Register stub_marker = temps.AcquireX(); | 906 __ Bind(&call_deopt_entry); |
857 | 907 __ Mov(deopt_entry, Operand(reinterpret_cast<uint64_t>(base), |
858 __ Mov(stub_deopt_entry, ExternalReference::ForDeoptEntry(entry)); | 908 RelocInfo::RUNTIME_ENTRY)); |
859 if (needs_frame.is_bound()) { | 909 __ Add(deopt_entry, deopt_entry, entry_offset); |
860 __ B(&needs_frame); | 910 __ Call(deopt_entry); |
861 } else { | |
862 __ Bind(&needs_frame); | |
863 // This variant of deopt can only be used with stubs. Since we don't | |
864 // have a function pointer to install in the stack frame that we're | |
865 // building, install a special marker there instead. | |
866 ASSERT(info()->IsStub()); | |
867 __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB)); | |
868 __ Push(lr, fp, cp, stub_marker); | |
869 __ Add(fp, __ StackPointer(), 2 * kPointerSize); | |
870 __ Call(stub_deopt_entry); | |
871 } | |
872 } else { | |
873 if (info()->saves_caller_doubles()) { | |
874 ASSERT(info()->IsStub()); | |
875 RestoreCallerDoubles(); | |
876 } | |
877 __ Call(entry, RelocInfo::RUNTIME_ENTRY); | |
878 } | |
879 masm()->CheckConstPool(false, false); | |
880 } | 911 } |
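Note (editorial, illustrative only): the new per-entry Mov(entry_offset, entry - base) plus the single shared tail above replace one 64-bit absolute address per jump-table slot with one small immediate per slot and a single relocatable base constant, which works because the second-level deopt entries are laid out contiguously with a small fixed stride. The stand-alone sketch below models the address arithmetic only; the sizes and names are made up, and it is not the V8 deoptimizer table.

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Model of a second-level table: a contiguous block where entry i
      // starts at base + i * kEntrySize (the stride here is illustrative).
      const uint32_t kEntrySize = 8;
      const int kNumEntries = 4;
      static uint8_t table[kEntrySize * kNumEntries];
      uint64_t base = reinterpret_cast<uint64_t>(&table[0]);

      for (int i = 0; i < kNumEntries; i++) {
        uint64_t entry = reinterpret_cast<uint64_t>(&table[i * kEntrySize]);

        // Old scheme: materialize `entry` itself, i.e. one 64-bit relocatable
        // literal per slot, each typically loaded from the constant pool.
        // New scheme: one small immediate per slot...
        uint32_t entry_offset = static_cast<uint32_t>(entry - base);

        // ...plus one shared tail that loads `base` once and adds the offset.
        uint64_t target = base + entry_offset;
        std::printf("slot %d: offset %u -> %#llx\n", i, entry_offset,
                    static_cast<unsigned long long>(target));
      }
      return 0;
    }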
881 | 912 |
882 // Force constant pool emission at the end of the deopt jump table to make | 913 // Force constant pool emission at the end of the deopt jump table to make |
883 // sure that no constant pools are emitted after. | 914 // sure that no constant pools are emitted after. |
884 masm()->CheckConstPool(true, false); | 915 masm()->CheckConstPool(true, false); |
885 | 916 |
886 // The deoptimization jump table is the last part of the instruction | 917 // The deoptimization jump table is the last part of the instruction |
887 // sequence. Mark the generated code as done unless we bailed out. | 918 // sequence. Mark the generated code as done unless we bailed out. |
888 if (!is_aborted()) status_ = DONE; | 919 if (!is_aborted()) status_ = DONE; |
889 return !is_aborted(); | 920 return !is_aborted(); |
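Note (editorial, illustrative only): in the needs_frame path above, Push(lr, fp, cp, stub_marker) followed by Add(fp, StackPointer(), 2 * kPointerSize) builds a standard-looking frame in which the STUB marker occupies the slot a JSFunction pointer would normally fill. Assuming the first Push operand lands at the highest address and kPointerSize is 8 on arm64, the resulting offsets from the new fp are sketched below; the names are made up for illustration and are not V8's frame constants.

    // Layout after: Push(lr, fp, cp, stub_marker); fp = sp + 2 * kPointerSize
    namespace stub_deopt_frame {
    const int kPointerSize = 8;
    const int kCallerLrOffset = 1 * kPointerSize;   // [fp + 8]  saved lr (return address)
    const int kCallerFpOffset = 0 * kPointerSize;   // [fp + 0]  caller's fp
    const int kContextOffset  = -1 * kPointerSize;  // [fp - 8]  cp (context)
    const int kMarkerOffset   = -2 * kPointerSize;  // [fp - 16] Smi(StackFrame::STUB),
                                                    //           in the function slot
    }  // namespace stub_deopt_frame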
(...skipping 5113 matching lines...)
6003 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 6034 __ Ldr(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
6004 // Index is equal to negated out of object property index plus 1. | 6035 // Index is equal to negated out of object property index plus 1. |
6005 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); | 6036 __ Sub(result, result, Operand::UntagSmiAndScale(index, kPointerSizeLog2)); |
6006 __ Ldr(result, FieldMemOperand(result, | 6037 __ Ldr(result, FieldMemOperand(result, |
6007 FixedArray::kHeaderSize - kPointerSize)); | 6038 FixedArray::kHeaderSize - kPointerSize)); |
6008 __ Bind(deferred->exit()); | 6039 __ Bind(deferred->exit()); |
6009 __ Bind(&done); | 6040 __ Bind(&done); |
6010 } | 6041 } |
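Note (editorial, illustrative only): a quick arithmetic check of the out-of-object load above. With index encoded as -(p + 1) for out-of-object property index p ("negated out of object property index plus 1"), subtracting index * kPointerSize from the properties pointer and then loading at FixedArray::kHeaderSize - kPointerSize lands exactly on element p of the properties array. The sketch assumes kPointerSize = 8 and a two-word FixedArray header (the header size cancels out anyway); it checks only the offsets.

    #include <cstdio>

    int main() {
      const int kPointerSize = 8;
      const int kHeaderSize = 2 * kPointerSize;  // assumed FixedArray::kHeaderSize

      for (int p = 0; p < 3; p++) {        // p = out-of-object property index
        int index = -(p + 1);              // encoding used by the code above
        int after_sub = -index * kPointerSize;                // result -= index * 8
        int load_offset = after_sub + kHeaderSize - kPointerSize;
        int element_offset = kHeaderSize + p * kPointerSize;  // element p's offset
        std::printf("p=%d: load at +%d, element at +%d\n",
                    p, load_offset, element_offset);          // the two always match
      }
      return 0;
    }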
6011 | 6042 |
6012 } } // namespace v8::internal | 6043 } } // namespace v8::internal |