| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/v8.h" | 5 #include "src/v8.h" |
| 6 | 6 |
| 7 #include "src/arm64/lithium-codegen-arm64.h" | 7 #include "src/arm64/lithium-codegen-arm64.h" |
| 8 #include "src/arm64/lithium-gap-resolver-arm64.h" | 8 #include "src/arm64/lithium-gap-resolver-arm64.h" |
| 9 #include "src/base/bits.h" | 9 #include "src/base/bits.h" |
| 10 #include "src/code-factory.h" | 10 #include "src/code-factory.h" |
| (...skipping 824 matching lines...) |
| 835 // sure that no constant pools are emitted after deferred code because | 835 // sure that no constant pools are emitted after deferred code because |
| 836 // deferred code generation is the last step which generates code. The two | 836 // deferred code generation is the last step which generates code. The two |
| 837 // following steps will only output data used by crankshaft. | 837 // following steps will only output data used by crankshaft. |
| 838 masm()->CheckConstPool(true, false); | 838 masm()->CheckConstPool(true, false); |
| 839 | 839 |
| 840 return !is_aborted(); | 840 return !is_aborted(); |
| 841 } | 841 } |
| 842 | 842 |
| 843 | 843 |
| 844 bool LCodeGen::GenerateJumpTable() { | 844 bool LCodeGen::GenerateJumpTable() { |
| 845 Label needs_frame, restore_caller_doubles, call_deopt_entry; | 845 Label needs_frame, call_deopt_entry; |
| 846 | 846 |
| 847 if (jump_table_.length() > 0) { | 847 if (jump_table_.length() > 0) { |
| 848 Comment(";;; -------------------- Jump table --------------------"); | 848 Comment(";;; -------------------- Jump table --------------------"); |
| 849 Address base = jump_table_[0]->address; | 849 Address base = jump_table_[0]->address; |
| 850 | 850 |
| 851 UseScratchRegisterScope temps(masm()); | 851 UseScratchRegisterScope temps(masm()); |
| 852 Register entry_offset = temps.AcquireX(); | 852 Register entry_offset = temps.AcquireX(); |
| 853 | 853 |
| 854 int length = jump_table_.length(); | 854 int length = jump_table_.length(); |
| 855 for (int i = 0; i < length; i++) { | 855 for (int i = 0; i < length; i++) { |
| 856 Deoptimizer::JumpTableEntry* table_entry = jump_table_[i]; | 856 Deoptimizer::JumpTableEntry* table_entry = jump_table_[i]; |
| 857 __ Bind(&table_entry->label); | 857 __ Bind(&table_entry->label); |
| 858 | 858 |
| 859 Address entry = table_entry->address; | 859 Address entry = table_entry->address; |
| 860 DeoptComment(table_entry->deopt_info); | 860 DeoptComment(table_entry->deopt_info); |
| 861 | 861 |
| 862 // Second-level deopt table entries are contiguous and small, so instead | 862 // Second-level deopt table entries are contiguous and small, so instead |
| 863 // of loading the full, absolute address of each one, load the base | 863 // of loading the full, absolute address of each one, load the base |
| 864 // address and add an immediate offset. | 864 // address and add an immediate offset. |
| 865 __ Mov(entry_offset, entry - base); | 865 __ Mov(entry_offset, entry - base); |
| 866 | 866 |
| 867 if (table_entry->needs_frame) { | 867 if (table_entry->needs_frame) { |
| 868 DCHECK(!info()->saves_caller_doubles()); | 868 DCHECK(!info()->saves_caller_doubles()); |
| 869 Comment(";;; call deopt with frame"); |
| 869 // Save lr before Bl, fp will be adjusted in the needs_frame code. | 870 // Save lr before Bl, fp will be adjusted in the needs_frame code. |
| 870 __ Push(lr, fp, cp); | 871 __ Push(lr, fp); |
| 871 // Reuse the existing needs_frame code. | 872 // Reuse the existing needs_frame code. |
| 872 __ Bl(&needs_frame); | 873 __ Bl(&needs_frame); |
| 873 } else if (info()->saves_caller_doubles()) { | |
| 874 DCHECK(info()->IsStub()); | |
| 875 // Reuse the existing restore_caller_doubles code. | |
| 876 __ Bl(&restore_caller_doubles); | |
| 877 } else { | 874 } else { |
| 878 // There is nothing special to do, so just continue to the second-level | 875 // There is nothing special to do, so just continue to the second-level |
| 879 // table. | 876 // table. |
| 880 __ Bl(&call_deopt_entry); | 877 __ Bl(&call_deopt_entry); |
| 881 } | 878 } |
| 882 | 879 |
| 883 bool last_entry = (i + 1) == length; | 880 masm()->CheckConstPool(false, false); |
| 884 masm()->CheckConstPool(false, last_entry); | |
| 885 } | 881 } |
| 886 | 882 |
| 887 if (needs_frame.is_linked()) { | 883 if (needs_frame.is_linked()) { |
| 888 // This variant of deopt can only be used with stubs. Since we don't | 884 // This variant of deopt can only be used with stubs. Since we don't |
| 889 // have a function pointer to install in the stack frame that we're | 885 // have a function pointer to install in the stack frame that we're |
| 890 // building, install a special marker there instead. | 886 // building, install a special marker there instead. |
| 891 DCHECK(info()->IsStub()); | 887 DCHECK(info()->IsStub()); |
| 892 | 888 |
| 889 Comment(";;; needs_frame common code"); |
| 893 UseScratchRegisterScope temps(masm()); | 890 UseScratchRegisterScope temps(masm()); |
| 894 Register stub_marker = temps.AcquireX(); | 891 Register stub_marker = temps.AcquireX(); |
| 895 __ Bind(&needs_frame); | 892 __ Bind(&needs_frame); |
| 896 __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB)); | 893 __ Mov(stub_marker, Smi::FromInt(StackFrame::STUB)); |
| 897 __ Push(stub_marker); | 894 __ Push(cp, stub_marker); |
| 898 __ Add(fp, __ StackPointer(), 2 * kPointerSize); | 895 __ Add(fp, __ StackPointer(), 2 * kPointerSize); |
| 899 if (restore_caller_doubles.is_linked()) { | |
| 900 __ B(&call_deopt_entry); | |
| 901 } | |
| 902 } | 896 } |
| 903 | 897 |
| 904 if (restore_caller_doubles.is_linked()) { | 898 // Generate common code for calling the second-level deopt table. |
| 905 __ Bind(&restore_caller_doubles); | 899 __ Bind(&call_deopt_entry); |
| | 900 |
| | 901 if (info()->saves_caller_doubles()) { |
| | 902 DCHECK(info()->IsStub()); |
| 906 RestoreCallerDoubles(); | 903 RestoreCallerDoubles(); |
| 907 } | 904 } |
| 908 | 905 |
| 909 // Generate common code for calling the second-level deopt table. | |
| 910 Register deopt_entry = temps.AcquireX(); | 906 Register deopt_entry = temps.AcquireX(); |
| 911 __ Bind(&call_deopt_entry); | |
| 912 __ Mov(deopt_entry, Operand(reinterpret_cast<uint64_t>(base), | 907 __ Mov(deopt_entry, Operand(reinterpret_cast<uint64_t>(base), |
| 913 RelocInfo::RUNTIME_ENTRY)); | 908 RelocInfo::RUNTIME_ENTRY)); |
| 914 __ Add(deopt_entry, deopt_entry, entry_offset); | 909 __ Add(deopt_entry, deopt_entry, entry_offset); |
| 915 __ Br(deopt_entry); | 910 __ Br(deopt_entry); |
| 916 } | 911 } |
| 917 | 912 |
| 918 // Force constant pool emission at the end of the deopt jump table to make | 913 // Force constant pool emission at the end of the deopt jump table to make |
| 919 // sure that no constant pools are emitted after. | 914 // sure that no constant pools are emitted after. |
| 920 masm()->CheckConstPool(true, false); | 915 masm()->CheckConstPool(true, false); |
| 921 | 916 |
| (...skipping 5185 matching lines...) |
| 6107 Handle<ScopeInfo> scope_info = instr->scope_info(); | 6102 Handle<ScopeInfo> scope_info = instr->scope_info(); |
| 6108 __ Push(scope_info); | 6103 __ Push(scope_info); |
| 6109 __ Push(ToRegister(instr->function())); | 6104 __ Push(ToRegister(instr->function())); |
| 6110 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6105 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
| 6111 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6106 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 6112 } | 6107 } |
| 6113 | 6108 |
| 6114 | 6109 |
| 6115 | 6110 |
| 6116 } } // namespace v8::internal | 6111 } } // namespace v8::internal |
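Side note for reviewers, not part of the CL: the jump-table comment above ("load the base address and add an immediate offset") relies on the second-level deopt entries being contiguous and small. Below is a minimal standalone C++ sketch of that idea; all names (`DeoptEntry`, `Handler0`, `Handler1`) are hypothetical and no V8 APIs are used.

```cpp
// Minimal sketch (hypothetical names, no V8 APIs): because the
// second-level entries are contiguous, each jump-table slot only
// records a small offset from the first entry; shared tail code
// loads the base once and adds the offset to rebuild the address.
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

struct DeoptEntry {
  void (*handler)();
};

void Handler0() { std::puts("deopt entry 0"); }
void Handler1() { std::puts("deopt entry 1"); }

int main() {
  std::vector<DeoptEntry> table = {{Handler0}, {Handler1}};
  const std::uint8_t* base =
      reinterpret_cast<const std::uint8_t*>(table.data());

  // Per-slot code would only materialize this small offset...
  std::vector<std::ptrdiff_t> offsets;
  for (const DeoptEntry& e : table) {
    offsets.push_back(reinterpret_cast<const std::uint8_t*>(&e) - base);
  }

  // ...and the common tail (call_deopt_entry above) adds it to the
  // base, which is loaded only once.
  for (std::ptrdiff_t off : offsets) {
    const DeoptEntry* entry =
        reinterpret_cast<const DeoptEntry*>(base + off);
    entry->handler();
  }
  return 0;
}
```

This keeps each per-entry stub down to a single `Mov` of a small immediate plus a branch, instead of one constant-pool-backed absolute address per entry.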