OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_X87 | 5 #if V8_TARGET_ARCH_X87 |
6 | 6 |
7 #include "src/code-factory.h" | 7 #include "src/code-factory.h" |
8 #include "src/codegen.h" | 8 #include "src/codegen.h" |
9 #include "src/deoptimizer.h" | 9 #include "src/deoptimizer.h" |
10 #include "src/full-codegen/full-codegen.h" | 10 #include "src/full-codegen/full-codegen.h" |
(...skipping 810 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
821 // This simulates the initial call to bytecode handlers in interpreter entry | 821 // This simulates the initial call to bytecode handlers in interpreter entry |
822 // trampoline. The return will never actually be taken, but our stack walker | 822 // trampoline. The return will never actually be taken, but our stack walker |
823 // uses this address to determine whether a frame is interpreted. | 823 // uses this address to determine whether a frame is interpreted. |
824 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); | 824 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); |
825 | 825 |
826 Generate_EnterBytecodeDispatch(masm); | 826 Generate_EnterBytecodeDispatch(masm); |
827 } | 827 } |
828 | 828 |
829 | 829 |
// Lazy-compilation stub: before falling back to the runtime, search the
// SharedFunctionInfo's optimized code map for cached literals/code that match
// the current native context, and install them on the closure if found.
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argument count (preserved for callee)
  //  -- edx : new target (preserved for callee)
  //  -- edi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime, gotta_call_runtime_no_stack;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = edi;
  Register new_target = edx;
  Register argument_count = eax;

  // Spill the three caller-preserved registers so eax/edx/edi can be reused
  // as scratch below. Every exit path must pop exactly these three (or jump
  // to gotta_call_runtime, which does).
  __ push(argument_count);
  __ push(new_target);
  __ push(closure);

  // NOTE: map aliases argument_count (eax); the original eax value now lives
  // only on the stack.
  Register map = argument_count;
  Register index = ebx;
  __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
  // A map with fewer than 2 elements (Smi-tagged length) has no context
  // entries to scan.
  __ cmp(index, Immediate(Smi::FromInt(2)));
  __ j(less, &gotta_call_runtime);

  // Find literals.
  // edx : native context
  // ebx : length / index
  // eax : optimized code map
  // stack[0] : new target
  // stack[4] : closure
  Register native_context = edx;
  __ mov(native_context, NativeContextOperand());

  // Walk the optimized code map from the end toward the front, one entry
  // (kEntryLength fields) at a time.
  __ bind(&loop_top);
  // NOTE: temp aliases closure (edi); after this point the closure must be
  // reloaded from the stack (see the Operand(esp, 0) load below) or popped.
  Register temp = edi;

  // Does the native context match?
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousContext));
  __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ cmp(temp, native_context);
  __ j(not_equal, &loop_bottom);
  // OSR id set to none?
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousOsrAstId));
  const int bailout_id = BailoutId::None().ToInt();
  __ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
  __ j(not_equal, &loop_bottom);
  // Literals available?
  __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
                            SharedFunctionInfo::kOffsetToPreviousLiterals));
  __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
  // A cleared weak cell holds a Smi; in that case the cached entry is stale.
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  // Reload the closure from the stack (edi currently holds temp).
  __ mov(ecx, Operand(esp, 0));
  __ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
  // index (ebx) is used as the write barrier's scratch register, so preserve
  // it around the call.
  __ push(index);
  __ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  __ pop(index);

  // Code available?
  Register entry = ecx;
  __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
                             SharedFunctionInfo::kOffsetToPreviousCachedCode));
  __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ pop(closure);
  // Store code entry in the closure.
  // Skip past the Code object header to the first instruction.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));

  // Shared tail also reached from the context-free map entry below; expects
  // entry = code entry, closure restored, and new_target/argument_count still
  // on the stack.
  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, eax);

  // Link the closure into the optimized function list.
  // ecx : code entry
  // edx : native context
  // edi : closure
  __ mov(ebx,
         ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
         closure);
  // Save closure before the write barrier.
  __ mov(ebx, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
                            kDontSaveFPRegs);
  __ mov(closure, ebx);
  // Restore the two remaining spilled registers and enter the cached code.
  __ pop(new_target);
  __ pop(argument_count);
  __ jmp(entry);

  __ bind(&loop_bottom);
  // Smi arithmetic: step back one full map entry (index is Smi-tagged).
  __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
  __ cmp(index, Immediate(Smi::FromInt(1)));
  __ j(greater, &loop_top);

  // We found neither literals nor code.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);
  // Restore closure (edi); new_target/argument_count remain spilled.
  __ pop(closure);

  // Last possibility. Check the context free optimized code map entry.
  __ mov(entry, FieldOperand(map, FixedArray::kHeaderSize +
                                      SharedFunctionInfo::kSharedCodeIndex));
  __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  // Stack is fully unwound from here on; closure already restored above.
  __ pop(new_target);
  __ pop(argument_count);
  // Is the full code valid?
  __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  // Extract the code kind from the flags word and reject builtins
  // (presumably the lazy-compile builtin itself — TODO confirm).
  __ mov(ebx, FieldOperand(entry, Code::kFlagsOffset));
  __ and_(ebx, Code::KindField::kMask);
  __ shr(ebx, Code::KindField::kShift);
  __ cmp(ebx, Immediate(Code::BUILTIN));
  __ j(equal, &gotta_call_runtime_no_stack);
  // Yes, install the full code.
  __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
  __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ RecordWriteCodeEntryField(closure, entry, ebx);
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  // Unwind the three registers spilled at entry before calling the runtime.
  __ pop(closure);
  __ pop(new_target);
  __ pop(argument_count);
  __ bind(&gotta_call_runtime_no_stack);

  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
833 | 981 |
834 | 982 |
835 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 983 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |
836 GenerateTailCallToReturnedCode(masm, | 984 GenerateTailCallToReturnedCode(masm, |
837 Runtime::kCompileOptimized_NotConcurrent); | 985 Runtime::kCompileOptimized_NotConcurrent); |
838 } | 986 } |
839 | 987 |
840 | 988 |
(...skipping 1893 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2734 // And "return" to the OSR entry point of the function. | 2882 // And "return" to the OSR entry point of the function. |
2735 __ ret(0); | 2883 __ ret(0); |
2736 } | 2884 } |
2737 | 2885 |
2738 | 2886 |
2739 #undef __ | 2887 #undef __ |
2740 } // namespace internal | 2888 } // namespace internal |
2741 } // namespace v8 | 2889 } // namespace v8 |
2742 | 2890 |
2743 #endif // V8_TARGET_ARCH_X87 | 2891 #endif // V8_TARGET_ARCH_X87 |
OLD | NEW |