| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
| 6 | 6 |
| 7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
| 8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
| 9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
| 10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
| (...skipping 833 matching lines...) |
| 844 // This simulates the initial call to bytecode handlers in interpreter entry | 844 // This simulates the initial call to bytecode handlers in interpreter entry |
| 845 // trampoline. The return will never actually be taken, but our stack walker | 845 // trampoline. The return will never actually be taken, but our stack walker |
| 846 // uses this address to determine whether a frame is interpreted. | 846 // uses this address to determine whether a frame is interpreted. |
| 847 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); | 847 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); |
| 848 | 848 |
| 849 Generate_EnterBytecodeDispatch(masm); | 849 Generate_EnterBytecodeDispatch(masm); |
| 850 } | 850 } |
| 851 | 851 |
| 852 | 852 |
| 853 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | 853 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |
| 854 // ----------- S t a t e ------------- | |
| 855 // -- edx : new target (preserved for callee) | |
| 856 // -- edi : target function (preserved for callee) | |
| 857 // ----------------------------------- | |
| 858 // First lookup code, maybe we don't need to compile! | |
| 859 Label gotta_call_runtime, gotta_call_runtime_no_stack; | |
| 860 Label maybe_call_runtime; | |
| 861 Label try_shared; | |
| 862 Label loop_top, loop_bottom; | |
| 863 | |
| 864 Register closure = edi; | |
| 865 Register new_target = edx; | |
| 866 __ push(new_target); | |
| 867 __ push(closure); | |
| 868 | |
| 869 Register map = eax; | |
| 870 Register index = ebx; | |
| 871 __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset)); | |
| 872 __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset)); | |
| 873 __ mov(index, FieldOperand(map, FixedArray::kLengthOffset)); | |
| 874 __ cmp(index, Immediate(Smi::FromInt(2))); | |
| 875 __ j(less, &gotta_call_runtime); | |
| 876 | |
| 877 // Find literals. | |
| 878 // edx : native context | |
| 879 // ebx : length / index | |
| 880 // eax : optimized code map | |
| 881 // stack[0] : new target | |
| 882 // stack[4] : closure | |
| 883 Register native_context = edx; | |
| 884 __ mov(native_context, NativeContextOperand()); | |
| 885 | |
| 886 __ bind(&loop_top); | |
| 887 Register temp = edi; | |
| 888 | |
| 889 // Does the native context match? | |
| 890 __ mov(temp, FieldOperand(map, index, times_half_pointer_size, | |
| 891 SharedFunctionInfo::OffsetToPreviousContext())); | |
| 892 __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset)); | |
| 893 __ cmp(temp, native_context); | |
| 894 __ j(not_equal, &loop_bottom); | |
| 895 // OSR id set to none? | |
| 896 __ mov(temp, FieldOperand(map, index, times_half_pointer_size, | |
| 897 SharedFunctionInfo::OffsetToPreviousOsrAstId())); | |
| 898 const int bailout_id = BailoutId::None().ToInt(); | |
| 899 __ cmp(temp, Immediate(Smi::FromInt(bailout_id))); | |
| 900 __ j(not_equal, &loop_bottom); | |
| 901 // Literals available? | |
| 902 __ mov(temp, FieldOperand(map, index, times_half_pointer_size, | |
| 903 SharedFunctionInfo::OffsetToPreviousLiterals())); | |
| 904 __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset)); | |
| 905 __ JumpIfSmi(temp, &gotta_call_runtime); | |
| 906 | |
| 907 // Save the literals in the closure. | |
| 908 __ mov(ecx, Operand(esp, 0)); | |
| 909 __ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp); | |
| 910 __ push(index); | |
| 911 __ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index, | |
| 912 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
| 913 __ pop(index); | |
| 914 | |
| 915 // Code available? | |
| 916 Register entry = ecx; | |
| 917 __ mov(entry, FieldOperand(map, index, times_half_pointer_size, | |
| 918 SharedFunctionInfo::OffsetToPreviousCachedCode())); | |
| 919 __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset)); | |
| 920 __ JumpIfSmi(entry, &maybe_call_runtime); | |
| 921 | |
| 922 // Found literals and code. Get them into the closure and return. | |
| 923 __ pop(closure); | |
| 924 // Store code entry in the closure. | |
| 925 __ lea(entry, FieldOperand(entry, Code::kHeaderSize)); | |
| 926 | |
| 927 Label install_optimized_code_and_tailcall; | |
| 928 __ bind(&install_optimized_code_and_tailcall); | |
| 929 __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry); | |
| 930 __ RecordWriteCodeEntryField(closure, entry, eax); | |
| 931 | |
| 932 // Link the closure into the optimized function list. | |
| 933 // ecx : code entry | |
| 934 // edx : native context | |
| 935 // edi : closure | |
| 936 __ mov(ebx, | |
| 937 ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST)); | |
| 938 __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx); | |
| 939 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax, | |
| 940 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | |
| 941 const int function_list_offset = | |
| 942 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST); | |
| 943 __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), | |
| 944 closure); | |
| 945 // Save closure before the write barrier. | |
| 946 __ mov(ebx, closure); | |
| 947 __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax, | |
| 948 kDontSaveFPRegs); | |
| 949 __ mov(closure, ebx); | |
| 950 __ pop(new_target); | |
| 951 __ jmp(entry); | |
| 952 | |
| 953 __ bind(&loop_bottom); | |
| 954 __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength))); | |
| 955 __ cmp(index, Immediate(Smi::FromInt(1))); | |
| 956 __ j(greater, &loop_top); | |
| 957 | |
| 958 // We found neither literals nor code. | |
| 959 __ jmp(&gotta_call_runtime); | |
| 960 | |
| 961 __ bind(&maybe_call_runtime); | |
| 962 __ pop(closure); | |
| 963 | |
| 964 // Last possibility. Check the context free optimized code map entry. | |
| 965 __ mov(entry, FieldOperand(map, FixedArray::kHeaderSize + | |
| 966 SharedFunctionInfo::kSharedCodeIndex)); | |
| 967 __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset)); | |
| 968 __ JumpIfSmi(entry, &try_shared); | |
| 969 | |
| 970 // Store code entry in the closure. | |
| 971 __ lea(entry, FieldOperand(entry, Code::kHeaderSize)); | |
| 972 __ jmp(&install_optimized_code_and_tailcall); | |
| 973 | |
| 974 __ bind(&try_shared); | |
| 975 __ pop(new_target); | |
| 976 // Is the full code valid? | |
| 977 __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset)); | |
| 978 __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset)); | |
| 979 __ mov(ebx, FieldOperand(entry, Code::kFlagsOffset)); | |
| 980 __ and_(ebx, Code::KindField::kMask); | |
| 981 __ shr(ebx, Code::KindField::kShift); | |
| 982 __ cmp(ebx, Immediate(Code::BUILTIN)); | |
| 983 __ j(equal, &gotta_call_runtime_no_stack); | |
| 984 // Yes, install the full code. | |
| 985 __ lea(entry, FieldOperand(entry, Code::kHeaderSize)); | |
| 986 __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry); | |
| 987 __ RecordWriteCodeEntryField(closure, entry, eax); | |
| 988 __ jmp(entry); | |
| 989 | |
| 990 __ bind(&gotta_call_runtime); | |
| 991 __ pop(closure); | |
| 992 __ pop(new_target); | |
| 993 __ bind(&gotta_call_runtime_no_stack); | |
| 994 CallRuntimePassFunction(masm, Runtime::kCompileLazy); | 854 CallRuntimePassFunction(masm, Runtime::kCompileLazy); |
| 995 GenerateTailCallToReturnedCode(masm); | 855 GenerateTailCallToReturnedCode(masm); |
| 996 } | 856 } |
| 997 | 857 |
| 998 | 858 |
| 999 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 859 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
| 1000 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent); | 860 CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent); |
| 1001 GenerateTailCallToReturnedCode(masm); | 861 GenerateTailCallToReturnedCode(masm); |
| 1002 } | 862 } |
| 1003 | 863 |
| (...skipping 1913 matching lines...) |
| 2917 | 2777 |
| 2918 __ bind(&ok); | 2778 __ bind(&ok); |
| 2919 __ ret(0); | 2779 __ ret(0); |
| 2920 } | 2780 } |
| 2921 | 2781 |
| 2922 #undef __ | 2782 #undef __ |
| 2923 } // namespace internal | 2783 } // namespace internal |
| 2924 } // namespace v8 | 2784 } // namespace v8 |
| 2925 | 2785 |
| 2926 #endif // V8_TARGET_ARCH_IA32 | 2786 #endif // V8_TARGET_ARCH_IA32 |
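For readers skimming the deleted block in Generate_CompileLazy above: it was a hand-written assembly transcription of a lookup in the SharedFunctionInfo's optimized code map, scanning entries from newest to oldest for one matching the current native context with no OSR id, then recovering literals and code from weak cells. Below is a minimal, self-contained C++ sketch of that control flow. All types and names here are illustrative stand-ins, not V8's real API, and the shared-entry (`maybe_call_runtime`) and full-code (`try_shared`) fallbacks are elided.

```cpp
#include <optional>
#include <vector>

// Hypothetical model of one optimized-code-map entry; in the real map each
// field is a weak cell whose value may have been cleared by the GC.
struct CodeMapEntry {
  const void* native_context;  // weak; nullptr if the cell was cleared
  int osr_ast_id;              // kNoOsrAstId for a normal (non-OSR) entry
  const void* literals;        // weak; nullptr if cleared
  const void* code;            // weak; nullptr if cleared
};

constexpr int kNoOsrAstId = -1;  // stands in for BailoutId::None().ToInt()

// Mirrors the loop_top/loop_bottom loop in the deleted assembly: walk the
// map from the back (entries are appended) looking for a usable entry.
std::optional<CodeMapEntry> FindCachedEntry(
    const std::vector<CodeMapEntry>& code_map, const void* native_context) {
  for (auto it = code_map.rbegin(); it != code_map.rend(); ++it) {
    if (it->native_context != native_context) continue;  // wrong context
    if (it->osr_ast_id != kNoOsrAstId) continue;         // OSR-only entry
    if (it->literals == nullptr) return std::nullopt;    // must recompile
    return *it;  // caller installs literals, then code if still alive
  }
  return std::nullopt;  // fall through to Runtime::kCompileLazy
}
```

In the assembly, a hit installed the literals and code entry into the closure and linked it into the context's OPTIMIZED_FUNCTIONS_LIST before tail-calling the code; every miss path fell back to `CallRuntimePassFunction(masm, Runtime::kCompileLazy)`, which is all that remains on the NEW side.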