Chromium Code Reviews

Side by Side Diff: src/x64/builtins-x64.cc

Issue 1563213002: Type Feedback Vector lives in the closure (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Ports. Created 4 years, 11 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/code-factory.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/full-codegen/full-codegen.h"
(...skipping 861 matching lines...)
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}


void Builtins::Generate_InterpreterNotifyLazyDeoptimized(MacroAssembler* masm) {
  Generate_InterpreterNotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}


void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx : new target (preserved for callee)
  //  -- rdi : target function (preserved for callee)
  // -----------------------------------
  // First lookup code, maybe we don't need to compile!
  Label gotta_call_runtime;
  Label maybe_call_runtime;
  Label try_shared;
  Label loop_top, loop_bottom;

  Register closure = rdi;
  Register map = r8;
  Register index = r9;
  __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
  __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset));
  __ cmpl(index, Immediate(2));
  __ j(less, &gotta_call_runtime);

  // Find literals.
  // r14 : native context
  // r9  : length / index
  // r8  : optimized code map
  // rdx : new target
  // rdi : closure
  Register native_context = r14;
  __ movp(native_context, NativeContextOperand());

  __ bind(&loop_top);
  // Native context match?
  Register temp = r11;
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::OffsetToPreviousContext()));
  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ cmpp(temp, native_context);
  __ j(not_equal, &loop_bottom);
  // OSR id set to none?
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::OffsetToPreviousOsrAstId()));
  __ SmiToInteger32(temp, temp);
  const int bailout_id = BailoutId::None().ToInt();
  __ cmpl(temp, Immediate(bailout_id));
  __ j(not_equal, &loop_bottom);
  // Literals available?
  __ movp(temp, FieldOperand(map, index, times_pointer_size,
                             SharedFunctionInfo::OffsetToPreviousLiterals()));
  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
  __ JumpIfSmi(temp, &gotta_call_runtime);

  // Save the literals in the closure.
  __ movp(FieldOperand(closure, JSFunction::kLiteralsOffset), temp);
  __ movp(rax, index);
  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, rax,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);

  // Code available?
  Register entry = rcx;
  __ movp(entry,
          FieldOperand(map, index, times_pointer_size,
                       SharedFunctionInfo::OffsetToPreviousCachedCode()));
  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &maybe_call_runtime);

  // Found literals and code. Get them into the closure and return.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));

  Label install_optimized_code_and_tailcall;
  __ bind(&install_optimized_code_and_tailcall);
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);

  // Link the closure into the optimized function list.
  // rcx : code entry (entry)
  // r14 : native context
  // rdx : new target
  // rdi : closure
  __ movp(rbx,
          ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
  __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx);
  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, rax,
                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
  const int function_list_offset =
      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
  __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
          closure);
  // Save closure before the write barrier.
  __ movp(rbx, closure);
  __ RecordWriteContextSlot(native_context, function_list_offset, closure, rax,
                            kDontSaveFPRegs);
  __ movp(closure, rbx);
  __ jmp(entry);

  __ bind(&loop_bottom);
  __ subl(index, Immediate(SharedFunctionInfo::kEntryLength));
  __ cmpl(index, Immediate(1));
  __ j(greater, &loop_top);

  // We did not find a matching entry with literals for this context, so fall
  // back to the runtime.
  __ jmp(&gotta_call_runtime);

  __ bind(&maybe_call_runtime);

  // Last possibility. Check the context free optimized code map entry.
  __ movp(entry, FieldOperand(map, FixedArray::kHeaderSize));
  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
  __ JumpIfSmi(entry, &try_shared);

  // Store code entry in the closure.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ jmp(&install_optimized_code_and_tailcall);

  __ bind(&try_shared);
  // Is the full code valid?
  __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
  __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
  __ movl(rbx, FieldOperand(entry, Code::kFlagsOffset));
  __ andl(rbx, Immediate(Code::KindField::kMask));
  __ shrl(rbx, Immediate(Code::KindField::kShift));
  __ cmpl(rbx, Immediate(Code::BUILTIN));
  __ j(equal, &gotta_call_runtime);
  // Yes, install the full code.
  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
  __ jmp(entry);

  __ bind(&gotta_call_runtime);
  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
  GenerateTailCallToReturnedCode(masm);
}


void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
  GenerateTailCallToReturnedCode(masm);
}

(...skipping 1611 matching lines...)
  __ ret(0);
}


#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64
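
For orientation, the new fast path in Generate_CompileLazy can be summarized by the following self-contained C++ sketch. The type and helper names below (CodeMapEntry, LazyResult, CompileLazyLookup, kNoOsr) are invented for illustration and are not V8 declarations; the sketch models only the control flow of the assembly above and omits the write barriers and the linking of the closure into the native context's optimized function list.

#include <vector>

// Illustrative model only; a cleared WeakCell is represented as nullptr.
struct CodeMapEntry {
  const void* context;   // native context the entry was compiled for
  int osr_ast_id;        // kNoOsr models BailoutId::None()
  const void* literals;  // literals array, or nullptr if the cell was cleared
  const void* code;      // optimized code, or nullptr if the cell was cleared
};

enum class LazyResult { kOptimizedCode, kSharedCode, kCallRuntime };

constexpr int kNoOsr = -1;

// Mirrors the builtin's control flow: scan the optimized code map for an
// entry whose context matches and whose OSR id is "none"; install its
// literals and tail-call its code if present. If the code cell was cleared,
// try the context-independent entry, then the SharedFunctionInfo's own code
// (unless that code is still a builtin), and finally the runtime.
LazyResult CompileLazyLookup(const std::vector<CodeMapEntry>& code_map,
                             const void* native_context,
                             const void* context_free_code,
                             bool shared_code_is_builtin) {
  bool found_literals = false;
  // The assembly walks the map from the last entry back toward the first.
  for (auto it = code_map.rbegin(); it != code_map.rend(); ++it) {
    if (it->context != native_context) continue;  // loop_bottom
    if (it->osr_ast_id != kNoOsr) continue;       // loop_bottom
    if (it->literals == nullptr) {
      return LazyResult::kCallRuntime;            // gotta_call_runtime
    }
    found_literals = true;  // the real code stores the literals in the closure
    if (it->code != nullptr) {
      return LazyResult::kOptimizedCode;          // install code and jmp entry
    }
    break;                                        // maybe_call_runtime
  }
  if (!found_literals) {
    return LazyResult::kCallRuntime;              // fell off loop_bottom
  }
  // maybe_call_runtime: the context-free optimized code map entry.
  if (context_free_code != nullptr) {
    return LazyResult::kOptimizedCode;
  }
  // try_shared: fall back to the unoptimized code on the SharedFunctionInfo.
  if (!shared_code_is_builtin) {
    return LazyResult::kSharedCode;
  }
  return LazyResult::kCallRuntime;                // gotta_call_runtime
}

In this model the return value only says which kind of code ends up installed in the closure; the assembly additionally records write barriers for the literals and code fields and, on the optimized path, links the closure into Context::OPTIMIZED_FUNCTIONS_LIST before tail-calling the code entry.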
