Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/arm64/lithium-codegen-arm64.h" | 7 #include "src/arm64/lithium-codegen-arm64.h" |
| 8 #include "src/arm64/lithium-gap-resolver-arm64.h" | 8 #include "src/arm64/lithium-gap-resolver-arm64.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
| (...skipping 846 matching lines...) | (...skipping 846 matching lines...) |
| 857 __ Bind(&table_entry->label); | 857 __ Bind(&table_entry->label); |
| 858 | 858 |
| 859 Address entry = table_entry->address; | 859 Address entry = table_entry->address; |
| 860 DeoptComment(table_entry->deopt_info); | 860 DeoptComment(table_entry->deopt_info); |
| 861 | 861 |
| 862 // Second-level deopt table entries are contiguous and small, so instead | 862 // Second-level deopt table entries are contiguous and small, so instead |
| 863 // of loading the full, absolute address of each one, load the base | 863 // of loading the full, absolute address of each one, load the base |
| 864 // address and add an immediate offset. | 864 // address and add an immediate offset. |
| 865 __ Mov(entry_offset, entry - base); | 865 __ Mov(entry_offset, entry - base); |
| 866 | 866 |
| 867 // The last entry can fall through into `call_deopt_entry`, avoiding a | |
| 868 // branch. | |
| 869 bool last_entry = (i + 1) == length; | |
| 870 | |
| 871 if (table_entry->needs_frame) { | 867 if (table_entry->needs_frame) { |
| 872 DCHECK(!info()->saves_caller_doubles()); | 868 DCHECK(!info()->saves_caller_doubles()); |
| 873 if (!needs_frame.is_bound()) { | 869 // Save lr before Bl, fp will be adjusted in the needs_frame code. |
| 874 // This variant of deopt can only be used with stubs. Since we don't | 870 __ Push(lr, fp, cp); |
> jbramley (2015/03/10 13:11:18):
> We can save some space here by pushing (lr, fp) he
>
> loislo (2015/03/10 14:42:18):
> I'll do this in https://codereview.chromium.org/99
| 875 // have a function pointer to install in the stack frame that we're | 871 // Reuse the existing needs_frame code. |
| 876 // building, install a special marker there instead. | 872 __ Bl(&needs_frame); |
| 877 DCHECK(info()->IsStub()); | |
| 878 | |
| 879 UseScratchRegisterScope temps(masm()); | |
| 880 Register stub_marker = temps.AcquireX(); | |
| 881 __ Bind(&needs_frame); | |
| 882 __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB)); | |
| 883 __ Push(lr, fp, cp, stub_marker); | |
| 884 __ Add(fp, __ StackPointer(), 2 * kPointerSize); | |
| 885 if (!last_entry) __ B(&call_deopt_entry); | |
| 886 } else { | |
| 887 // Reuse the existing needs_frame code. | |
| 888 __ B(&needs_frame); | |
| 889 } | |
| 890 } else if (info()->saves_caller_doubles()) { | 873 } else if (info()->saves_caller_doubles()) { |
| 891 DCHECK(info()->IsStub()); | 874 DCHECK(info()->IsStub()); |
| 892 if (!restore_caller_doubles.is_bound()) { | 875 // Reuse the existing restore_caller_doubles code. |
| 893 __ Bind(&restore_caller_doubles); | 876 __ Bl(&restore_caller_doubles); |
| 894 RestoreCallerDoubles(); | |
| 895 if (!last_entry) __ B(&call_deopt_entry); | |
| 896 } else { | |
| 897 // Reuse the existing restore_caller_doubles code. | |
| 898 __ B(&restore_caller_doubles); | |
| 899 } | |
| 900 } else { | 877 } else { |
| 901 // There is nothing special to do, so just continue to the second-level | 878 // There is nothing special to do, so just continue to the second-level |
| 902 // table. | 879 // table. |
| 903 if (!last_entry) __ B(&call_deopt_entry); | 880 __ Bl(&call_deopt_entry); |
| 904 } | 881 } |
| 905 | 882 |
| | 883 bool last_entry = (i + 1) == length; |
| 906 masm()->CheckConstPool(false, last_entry); | 884 masm()->CheckConstPool(false, last_entry); |
| 907 } | 885 } |
| 908 | 886 |
| | 887 if (needs_frame.is_linked()) { |
| | 888 // This variant of deopt can only be used with stubs. Since we don't |
| | 889 // have a function pointer to install in the stack frame that we're |
| | 890 // building, install a special marker there instead. |
| | 891 DCHECK(info()->IsStub()); |
| | 892 |
| | 893 UseScratchRegisterScope temps(masm()); |
| | 894 Register stub_marker = temps.AcquireX(); |
| | 895 __ Bind(&needs_frame); |
| | 896 __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB)); |
| | 897 __ Push(stub_marker); |
| | 898 __ Add(fp, __ StackPointer(), 2 * kPointerSize); |
| | 899 if (restore_caller_doubles.is_linked()) { |
| | 900 __ B(&call_deopt_entry); |
| | 901 } |
| | 902 } |
| | 903 |
| | 904 if (restore_caller_doubles.is_linked()) { |
| | 905 __ Bind(&restore_caller_doubles); |
| | 906 RestoreCallerDoubles(); |
| | 907 } |
| | 908 |
| 909 // Generate common code for calling the second-level deopt table. | 909 // Generate common code for calling the second-level deopt table. |
| 910 Register deopt_entry = temps.AcquireX(); | 910 Register deopt_entry = temps.AcquireX(); |
| 911 __ Bind(&call_deopt_entry); | 911 __ Bind(&call_deopt_entry); |
| 912 __ Mov(deopt_entry, Operand(reinterpret_cast<uint64_t>(base), | 912 __ Mov(deopt_entry, Operand(reinterpret_cast<uint64_t>(base), |
| 913 RelocInfo::RUNTIME_ENTRY)); | 913 RelocInfo::RUNTIME_ENTRY)); |
| 914 __ Add(deopt_entry, deopt_entry, entry_offset); | 914 __ Add(deopt_entry, deopt_entry, entry_offset); |
| 915 __ Call(deopt_entry); | 915 __ Br(deopt_entry); |
| 916 } | 916 } |
| 917 | 917 |
| 918 // Force constant pool emission at the end of the deopt jump table to make | 918 // Force constant pool emission at the end of the deopt jump table to make |
| 919 // sure that no constant pools are emitted after. | 919 // sure that no constant pools are emitted after. |
| 920 masm()->CheckConstPool(true, false); | 920 masm()->CheckConstPool(true, false); |
| 921 | 921 |
| 922 // The deoptimization jump table is the last part of the instruction | 922 // The deoptimization jump table is the last part of the instruction |
| 923 // sequence. Mark the generated code as done unless we bailed out. | 923 // sequence. Mark the generated code as done unless we bailed out. |
| 924 if (!is_aborted()) status_ = DONE; | 924 if (!is_aborted()) status_ = DONE; |
| 925 return !is_aborted(); | 925 return !is_aborted(); |
| (...skipping 219 matching lines...) | (...skipping 219 matching lines...) |
| 1145 | 1145 |
| 1146 void LCodeGen::DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr, | 1146 void LCodeGen::DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr, |
| 1147 Deoptimizer::DeoptReason deopt_reason) { | 1147 Deoptimizer::DeoptReason deopt_reason) { |
| 1148 __ TestForMinusZero(input); | 1148 __ TestForMinusZero(input); |
| 1149 DeoptimizeIf(vs, instr, deopt_reason); | 1149 DeoptimizeIf(vs, instr, deopt_reason); |
| 1150 } | 1150 } |
| 1151 | 1151 |
| 1152 | 1152 |
| 1153 void LCodeGen::DeoptimizeIfNotHeapNumber(Register object, LInstruction* instr) { | 1153 void LCodeGen::DeoptimizeIfNotHeapNumber(Register object, LInstruction* instr) { |
| 1154 __ CompareObjectMap(object, Heap::kHeapNumberMapRootIndex); | 1154 __ CompareObjectMap(object, Heap::kHeapNumberMapRootIndex); |
| 1155 DeoptimizeIf(ne, instr, Deoptimizer::kNotHeapNumber); | 1155 DeoptimizeIf(ne, instr, Deoptimizer::kNotAHeapNumber); |
| 1156 } | 1156 } |
| 1157 | 1157 |
| 1158 | 1158 |
| 1159 void LCodeGen::DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr, | 1159 void LCodeGen::DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr, |
| 1160 Deoptimizer::DeoptReason deopt_reason) { | 1160 Deoptimizer::DeoptReason deopt_reason) { |
| 1161 DeoptimizeBranch(instr, deopt_reason, reg_bit_set, rt, bit); | 1161 DeoptimizeBranch(instr, deopt_reason, reg_bit_set, rt, bit); |
| 1162 } | 1162 } |
| 1163 | 1163 |
| 1164 | 1164 |
| 1165 void LCodeGen::DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr, | 1165 void LCodeGen::DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr, |
| (...skipping 4941 matching lines...) | (...skipping 4941 matching lines...) |
| 6107 Handle<ScopeInfo> scope_info = instr->scope_info(); | 6107 Handle<ScopeInfo> scope_info = instr->scope_info(); |
| 6108 __ Push(scope_info); | 6108 __ Push(scope_info); |
| 6109 __ Push(ToRegister(instr->function())); | 6109 __ Push(ToRegister(instr->function())); |
| 6110 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6110 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
| 6111 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6111 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 6112 } | 6112 } |
| 6113 | 6113 |
| 6114 | 6114 |
| 6115 | 6115 |
| 6116 } } // namespace v8::internal | 6116 } } // namespace v8::internal |
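For readers skimming the diff, the comment around lines 862-865 ("load the base address and add an immediate offset") describes the size trick used by this jump table: the second-level deopt entries are contiguous and close together, so only the table base is materialized as a full 64-bit address and each entry contributes just a small offset. The snippet below is a hypothetical, self-contained C++ analogy of that encoding; the `base` constant, the 8-byte entry spacing, and the 64-entry count are invented for illustration, and it is not the V8 codegen itself.

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  // Stand-in for the absolute addresses of second-level deopt table entries:
  // contiguous and a fixed, small distance apart (values are invented).
  const uint64_t base = 0x0000004000100000ULL;
  std::vector<uint64_t> entries;
  for (int i = 0; i < 64; ++i) entries.push_back(base + 8u * i);

  // Materializing an arbitrary 64-bit address on ARM64 can take up to four
  // mov/movk instructions per entry. Keeping the base in one register and
  // encoding each entry as "base + small immediate" needs only a single Mov
  // of the offset per entry, which is the scheme the generated table uses.
  for (size_t i = 0; i < entries.size(); ++i) {
    uint64_t offset = entries[i] - base;  // always small for contiguous entries
    std::printf("entry %2zu: offset %3llu -> address 0x%llx\n", i,
                static_cast<unsigned long long>(offset),
                static_cast<unsigned long long>(entries[i]));
  }
  return 0;
}
```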