Chromium Code Reviews

Side by Side Diff: src/ia32/builtins-ia32.cc

Issue 1670143002: Visit the Optimized Code Map on first call rather than closure creation. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix mips64 rebase error. Created 4 years, 8 months ago
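The new Generate_CompileLazy body added in this patch searches the SharedFunctionInfo's optimized code map before falling back to the runtime. As an orientation aid only, here is a minimal, self-contained C++ model of that lookup; the type and helper names (CodeMapEntry, LookupOptimizedCodeMap, kNoOsrAstId) are illustrative assumptions, not V8 API. A short note on the register and stack discipline of the new code follows the diff.

// Minimal model (not V8 code) of the lookup performed by the new
// Generate_CompileLazy. Each code map entry pairs a native context with weak
// references to the literals and the optimized code, plus an OSR ast id.
#include <vector>

struct CodeMapEntry {
  const void* context;   // weak: nullptr once the WeakCell has been cleared
  const void* literals;  // weak
  int osr_ast_id;        // kNoOsrAstId for a non-OSR entry
  const void* code;      // weak
};

constexpr int kNoOsrAstId = -1;

// Returns cached optimized code for |native_context|, or nullptr when the
// builtin has to fall back to its slower paths (the context-independent
// shared code slot, the SharedFunctionInfo's own code, or
// Runtime::kCompileLazy).
const void* LookupOptimizedCodeMap(const std::vector<CodeMapEntry>& map,
                                   const void* native_context,
                                   const void** literals_out) {
  // The assembly walks the map from the last entry towards the first.
  for (auto it = map.rbegin(); it != map.rend(); ++it) {
    if (it->context != native_context) continue;  // wrong native context
    if (it->osr_ast_id != kNoOsrAstId) continue;  // OSR entry, skip it
    if (it->literals == nullptr) return nullptr;  // literals were collected:
                                                  // the builtin goes straight
                                                  // to the runtime
    *literals_out = it->literals;                 // installed on the closure
    return it->code;  // nullptr here means the code was collected; the real
                      // builtin then tries the shared slot before the runtime
  }
  return nullptr;
}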
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #if V8_TARGET_ARCH_IA32
6
7 #include "src/code-factory.h"
8 #include "src/codegen.h"
9 #include "src/deoptimizer.h"
10 #include "src/full-codegen/full-codegen.h"
(...skipping 826 matching lines...)
837 // This simulates the initial call to bytecode handlers in interpreter entry
838 // trampoline. The return will never actually be taken, but our stack walker
839 // uses this address to determine whether a frame is interpreted.
840 __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline());
841
842 Generate_EnterBytecodeDispatch(masm);
843 }
844
845
846 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
847 // ----------- S t a t e -------------
848 // -- eax : argument count (preserved for callee)
849 // -- edx : new target (preserved for callee)
850 // -- edi : target function (preserved for callee)
851 // -----------------------------------
852 // First lookup code, maybe we don't need to compile!
853 Label gotta_call_runtime, gotta_call_runtime_no_stack;
854 Label maybe_call_runtime;
855 Label try_shared;
856 Label loop_top, loop_bottom;
857
858 Register closure = edi;
859 Register new_target = edx;
860 Register argument_count = eax;
861
862 __ push(argument_count);
863 __ push(new_target);
864 __ push(closure);
865
866 Register map = argument_count;
867 Register index = ebx;
868 __ mov(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
869 __ mov(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
870 __ mov(index, FieldOperand(map, FixedArray::kLengthOffset));
871 __ cmp(index, Immediate(Smi::FromInt(2)));
872 __ j(less, &gotta_call_runtime);
873
874 // Find literals.
875 // edx : native context
876 // ebx : length / index
877 // eax : optimized code map
878 // stack[0] : new target
879 // stack[4] : closure
880 Register native_context = edx;
881 __ mov(native_context, NativeContextOperand());
882
883 __ bind(&loop_top);
884 Register temp = edi;
885
886 // Does the native context match?
887 __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
888 SharedFunctionInfo::OffsetToPreviousContext()));
889 __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
890 __ cmp(temp, native_context);
891 __ j(not_equal, &loop_bottom);
892 // OSR id set to none?
893 __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
894 SharedFunctionInfo::OffsetToPreviousOsrAstId()));
895 const int bailout_id = BailoutId::None().ToInt();
896 __ cmp(temp, Immediate(Smi::FromInt(bailout_id)));
897 __ j(not_equal, &loop_bottom);
898 // Literals available?
899 __ mov(temp, FieldOperand(map, index, times_half_pointer_size,
900 SharedFunctionInfo::OffsetToPreviousLiterals()));
901 __ mov(temp, FieldOperand(temp, WeakCell::kValueOffset));
902 __ JumpIfSmi(temp, &gotta_call_runtime);
903
904 // Save the literals in the closure.
905 __ mov(ecx, Operand(esp, 0));
906 __ mov(FieldOperand(ecx, JSFunction::kLiteralsOffset), temp);
907 __ push(index);
908 __ RecordWriteField(ecx, JSFunction::kLiteralsOffset, temp, index,
909 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
910 __ pop(index);
911
912 // Code available?
913 Register entry = ecx;
914 __ mov(entry, FieldOperand(map, index, times_half_pointer_size,
915 SharedFunctionInfo::OffsetToPreviousCachedCode()));
916 __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
917 __ JumpIfSmi(entry, &maybe_call_runtime);
918
919 // Found literals and code. Get them into the closure and return.
920 __ pop(closure);
921 // Store code entry in the closure.
922 __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
923
924 Label install_optimized_code_and_tailcall;
925 __ bind(&install_optimized_code_and_tailcall);
926 __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
927 __ RecordWriteCodeEntryField(closure, entry, eax);
928
929 // Link the closure into the optimized function list.
930 // ecx : code entry
931 // edx : native context
932 // edi : closure
933 __ mov(ebx,
934 ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
935 __ mov(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), ebx);
936 __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, ebx, eax,
937 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
938 const int function_list_offset =
939 Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
940 __ mov(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
941 closure);
942 // Save closure before the write barrier.
943 __ mov(ebx, closure);
944 __ RecordWriteContextSlot(native_context, function_list_offset, closure, eax,
945 kDontSaveFPRegs);
946 __ mov(closure, ebx);
947 __ pop(new_target);
948 __ pop(argument_count);
949 __ jmp(entry);
950
951 __ bind(&loop_bottom);
952 __ sub(index, Immediate(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
953 __ cmp(index, Immediate(Smi::FromInt(1)));
954 __ j(greater, &loop_top);
955
956 // We found neither literals nor code.
957 __ jmp(&gotta_call_runtime);
958
959 __ bind(&maybe_call_runtime);
960 __ pop(closure);
961
962 // Last possibility. Check the context free optimized code map entry.
963 __ mov(entry, FieldOperand(map, FixedArray::kHeaderSize +
964 SharedFunctionInfo::kSharedCodeIndex));
965 __ mov(entry, FieldOperand(entry, WeakCell::kValueOffset));
966 __ JumpIfSmi(entry, &try_shared);
967
968 // Store code entry in the closure.
969 __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
970 __ jmp(&install_optimized_code_and_tailcall);
971
972 __ bind(&try_shared);
973 __ pop(new_target);
974 __ pop(argument_count);
975 // Is the full code valid?
976 __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
977 __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
978 __ mov(ebx, FieldOperand(entry, Code::kFlagsOffset));
979 __ and_(ebx, Code::KindField::kMask);
980 __ shr(ebx, Code::KindField::kShift);
981 __ cmp(ebx, Immediate(Code::BUILTIN));
982 __ j(equal, &gotta_call_runtime_no_stack);
983 // Yes, install the full code.
984 __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
985 __ mov(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
986 __ RecordWriteCodeEntryField(closure, entry, ebx);
987 __ jmp(entry);
988
989 __ bind(&gotta_call_runtime);
990 __ pop(closure);
991 __ pop(new_target);
992 __ pop(argument_count);
993 __ bind(&gotta_call_runtime_no_stack);
994
995 GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
996 }
997
998
999 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
1000 GenerateTailCallToReturnedCode(masm,
1001 Runtime::kCompileOptimized_NotConcurrent);
1002 }
1003
1004
(...skipping 1867 matching lines...)
2724 // And "return" to the OSR entry point of the function. 2872 // And "return" to the OSR entry point of the function.
2725 __ ret(0); 2873 __ ret(0);
2726 } 2874 }
2727 2875
2728 2876
2729 #undef __ 2877 #undef __
2730 } // namespace internal 2878 } // namespace internal
2731 } // namespace v8 2879 } // namespace v8
2732 2880
2733 #endif // V8_TARGET_ARCH_IA32 2881 #endif // V8_TARGET_ARCH_IA32
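A note on the register and stack discipline of the new chunk: eax (argument count), edx (new target) and edi (closure) must stay live for the eventual callee, so the builtin pushes all three on entry and pops them on every exit path; gotta_call_runtime_no_stack covers the paths on which they have already been restored. The index into the optimized code map is a Smi, which is why the FieldOperand accesses scale it with times_half_pointer_size: an ia32 Smi is the value shifted left by one, so a further factor of two gives the pointer-sized element offset.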