| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_X64 | 5 #if V8_TARGET_ARCH_X64 |
| 6 | 6 |
| 7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
| 8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 895 matching lines...) |
| 906 // This simulates the initial call to bytecode handlers in interpreter entry | 906 // This simulates the initial call to bytecode handlers in interpreter entry |
| 907 // trampoline. The return will never actually be taken, but our stack walker | 907 // trampoline. The return will never actually be taken, but our stack walker |
| 908 // uses this address to determine whether a frame is interpreted. | 908 // uses this address to determine whether a frame is interpreted. |
| 909 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); | 909 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); |
| 910 | 910 |
| 911 Generate_EnterBytecodeDispatch(masm); | 911 Generate_EnterBytecodeDispatch(masm); |
| 912 } | 912 } |
| 913 | 913 |
| 914 | 914 |
| 915 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | 915 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |
| 916 // ----------- S t a t e ------------- | |
| 917 // -- rdx : new target (preserved for callee) | |
| 918 // -- rdi : target function (preserved for callee) | |
| 919 // ----------------------------------- | |
| 920 // First lookup code, maybe we don't need to compile! | |
| 921 Label gotta_call_runtime; | |
| 922 Label maybe_call_runtime; | |
| 923 Label try_shared; | |
| 924 Label loop_top, loop_bottom; | |
| 925 | |
| 926 Register closure = rdi; | |
| 927 Register map = r8; | |
| 928 Register index = r9; | |
| 929 __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset)); | |
| 930 __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset)); | |
| 931 __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset)); | |
| 932 __ cmpl(index, Immediate(2)); | |
| 933 __ j(less, &gotta_call_runtime); | |
| 934 | |
| 935 // Find literals. | |
| 936 // r14 : native context | |
| 937 // r9 : length / index | |
| 938 // r8 : optimized code map | |
| 939 // rdx : new target | |
| 940 // rdi : closure | |
| 941 Register native_context = r14; | |
| 942 __ movp(native_context, NativeContextOperand()); | |
| 943 | |
| 944 __ bind(&loop_top); | |
| 945 // Native context match? | |
| 946 Register temp = r11; | |
| 947 __ movp(temp, FieldOperand(map, index, times_pointer_size, | |
| 948 SharedFunctionInfo::OffsetToPreviousContext())); | |
| 949 __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset)); | |
| 950 __ cmpp(temp, native_context); | |
| 951 __ j(not_equal, &loop_bottom); | |
| 952 // OSR id set to none? | |
| 953 __ movp(temp, FieldOperand(map, index, times_pointer_size, | |
| 954 SharedFunctionInfo::OffsetToPreviousOsrAstId())); | |
| 955 __ SmiToInteger32(temp, temp); | |
| 956 const int bailout_id = BailoutId::None().ToInt(); | |
| 957 __ cmpl(temp, Immediate(bailout_id)); | |
| 958 __ j(not_equal, &loop_bottom); | |
| 959 // Literals available? | |
| 960 __ movp(temp, FieldOperand(map, index, times_pointer_size, | |
| 961 SharedFunctionInfo::OffsetToPreviousLiterals())); | |
| 962 __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset)); | |
| 963 __ JumpIfSmi(temp, &gotta_call_runtime); | |
| 964 | |
| 965 // Save the literals in the closure. | |
| 966 __ movp(FieldOperand(closure, JSFunction::kLiteralsOffset), temp); | |
| 967 __ movp(rax, index); | |
| 968 __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, rax, | |
| 969 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
| 970 | |
| 971 // Code available? | |
| 972 Register entry = rcx; | |
| 973 __ movp(entry, | |
| 974 FieldOperand(map, index, times_pointer_size, | |
| 975 SharedFunctionInfo::OffsetToPreviousCachedCode())); | |
| 976 __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset)); | |
| 977 __ JumpIfSmi(entry, &maybe_call_runtime); | |
| 978 | |
| 979 // Found literals and code. Get them into the closure and return. | |
| 980 __ leap(entry, FieldOperand(entry, Code::kHeaderSize)); | |
| 981 | |
| 982 Label install_optimized_code_and_tailcall; | |
| 983 __ bind(&install_optimized_code_and_tailcall); | |
| 984 __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry); | |
| 985 | |
| 986 // Link the closure into the optimized function list. | |
| 987 // rcx : code entry (entry) | |
| 988 // r14 : native context | |
| 989 // rdx : new target | |
| 990 // rdi : closure | |
| 991 __ movp(rbx, | |
| 992 ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST)); | |
| 993 __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx); | |
| 994 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, rax, | |
| 995 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
| 996 const int function_list_offset = | |
| 997 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST); | |
| 998 __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), | |
| 999 closure); | |
| 1000 // Save closure before the write barrier. | |
| 1001 __ movp(rbx, closure); | |
| 1002 __ RecordWriteContextSlot(native_context, function_list_offset, closure, rax, | |
| 1003 kDontSaveFPRegs); | |
| 1004 __ movp(closure, rbx); | |
| 1005 __ jmp(entry); | |
| 1006 | |
| 1007 __ bind(&loop_bottom); | |
| 1008 __ subl(index, Immediate(SharedFunctionInfo::kEntryLength)); | |
| 1009 __ cmpl(index, Immediate(1)); | |
| 1010 __ j(greater, &loop_top); | |
| 1011 | |
| 1012 // We found neither literals nor code. | |
| 1013 __ jmp(&gotta_call_runtime); | |
| 1014 | |
| 1015 __ bind(&maybe_call_runtime); | |
| 1016 | |
| 1017 // Last possibility. Check the context free optimized code map entry. | |
| 1018 __ movp(entry, FieldOperand(map, FixedArray::kHeaderSize + | |
| 1019 SharedFunctionInfo::kSharedCodeIndex)); | |
| 1020 __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset)); | |
| 1021 __ JumpIfSmi(entry, &try_shared); | |
| 1022 | |
| 1023 // Store code entry in the closure. | |
| 1024 __ leap(entry, FieldOperand(entry, Code::kHeaderSize)); | |
| 1025 __ jmp(&install_optimized_code_and_tailcall); | |
| 1026 | |
| 1027 __ bind(&try_shared); | |
| 1028 // Is the full code valid? | |
| 1029 __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset)); | |
| 1030 __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset)); | |
| 1031 __ movl(rbx, FieldOperand(entry, Code::kFlagsOffset)); | |
| 1032 __ andl(rbx, Immediate(Code::KindField::kMask)); | |
| 1033 __ shrl(rbx, Immediate(Code::KindField::kShift)); | |
| 1034 __ cmpl(rbx, Immediate(Code::BUILTIN)); | |
| 1035 __ j(equal, &gotta_call_runtime); | |
| 1036 // Yes, install the full code. | |
| 1037 __ leap(entry, FieldOperand(entry, Code::kHeaderSize)); | |
| 1038 __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry); | |
| 1039 __ jmp(entry); | |
| 1040 | |
| 1041 __ bind(&gotta_call_runtime); | |
| 1042 CallRuntimePassFunction(masm, Runtime::kCompileLazy); | 916 CallRuntimePassFunction(masm, Runtime::kCompileLazy); |
| 1043 GenerateTailCallToReturnedCode(masm); | 917 GenerateTailCallToReturnedCode(masm); |
| 1044 } | 918 } |
| 1045 | 919 |
| 1046 | 920 |
| 1047 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 921 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 1048 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent); | 922 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent); |
| 1049 GenerateTailCallToReturnedCode(masm); | 923 GenerateTailCallToReturnedCode(masm); |
| 1050 } | 924 } |
| 1051 | 925 |
| (...skipping 1826 matching lines...) |
| 2878 __ ret(0); | 2752 __ ret(0); |
| 2879 } | 2753 } |
| 2880 | 2754 |
| 2881 | 2755 |
| 2882 #undef __ | 2756 #undef __ |
| 2883 | 2757 |
| 2884 } // namespace internal | 2758 } // namespace internal |
| 2885 } // namespace v8 | 2759 } // namespace v8 |
| 2886 | 2760 |
| 2887 #endif // V8_TARGET_ARCH_X64 | 2761 #endif // V8_TARGET_ARCH_X64 |
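
For readers skimming the deleted hunk above: the removed assembly implemented a fast-path lookup in the SharedFunctionInfo's optimized code map before falling back to Runtime::kCompileLazy. The plain C++ sketch below only models that control flow so the hunk is easier to follow; CodeMapEntry, LookupResult and LookupCachedCode are hypothetical names invented here, not V8 API, and the real map is a FixedArray of weak cells walked register-by-register in the assembly.

// Illustrative sketch only: models the control flow of the deleted fast path.
#include <vector>

struct CodeMapEntry {
  const void* context;   // weak: native context the entry was compiled for
  int osr_ast_id;        // BailoutId::None() for non-OSR entries
  const void* literals;  // weak: cached literals, nullptr if cleared
  const void* code;      // weak: cached optimized code, nullptr if cleared
};

struct LookupResult {
  const void* literals = nullptr;
  const void* code = nullptr;
  bool call_runtime = true;  // fall through to Runtime::kCompileLazy
};

LookupResult LookupCachedCode(const std::vector<CodeMapEntry>& map,
                              const void* native_context,
                              const void* shared_slot_code,   // context-free map slot
                              const void* shared_info_code,   // SFI code, if not a builtin
                              int bailout_id_none) {
  LookupResult result;
  // Walk the map from the last entry to the first, as the deleted loop did.
  for (auto it = map.rbegin(); it != map.rend(); ++it) {
    if (it->context != native_context) continue;      // wrong native context
    if (it->osr_ast_id != bailout_id_none) continue;  // OSR-only entry
    if (it->literals == nullptr) return result;       // literals cleared: compile
    result.literals = it->literals;                   // install literals in the closure
    if (it->code != nullptr) {
      result.code = it->code;                         // cached optimized code is alive
      result.call_runtime = false;
    } else if (shared_slot_code != nullptr) {
      result.code = shared_slot_code;                 // context-free cached code
      result.call_runtime = false;
    } else if (shared_info_code != nullptr) {
      result.code = shared_info_code;                 // full (unoptimized) code
      result.call_runtime = false;
    }
    return result;
  }
  return result;  // no matching entry: call the runtime to compile lazily
}

A result with call_runtime == true corresponds to the gotta_call_runtime label, which is the only path the rewritten builtin keeps: it now unconditionally calls Runtime::kCompileLazy and tail-calls the returned code.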