| OLD | NEW | 
|     1 // Copyright 2012 the V8 project authors. All rights reserved. |     1 // Copyright 2012 the V8 project authors. All rights reserved. | 
|     2 // Use of this source code is governed by a BSD-style license that can be |     2 // Use of this source code is governed by a BSD-style license that can be | 
|     3 // found in the LICENSE file. |     3 // found in the LICENSE file. | 
|     4  |     4  | 
|     5 #if V8_TARGET_ARCH_X64 |     5 #if V8_TARGET_ARCH_X64 | 
|     6  |     6  | 
|     7 #include "src/code-factory.h" |     7 #include "src/code-factory.h" | 
|     8 #include "src/codegen.h" |     8 #include "src/codegen.h" | 
|     9 #include "src/deoptimizer.h" |     9 #include "src/deoptimizer.h" | 
|    10 #include "src/full-codegen/full-codegen.h" |    10 #include "src/full-codegen/full-codegen.h" | 
| (...skipping 886 matching lines...) | 
|   897   // This simulates the initial call to bytecode handlers in interpreter entry |   897   // This simulates the initial call to bytecode handlers in interpreter entry | 
|   898   // trampoline. The return will never actually be taken, but our stack walker |   898   // trampoline. The return will never actually be taken, but our stack walker | 
|   899   // uses this address to determine whether a frame is interpreted. |   899   // uses this address to determine whether a frame is interpreted. | 
|   900   __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); |   900   __ Push(masm->isolate()->builtins()->InterpreterEntryTrampoline()); | 
|   901  |   901  | 
|   902   Generate_EnterBytecodeDispatch(masm); |   902   Generate_EnterBytecodeDispatch(masm); | 
|   903 } |   903 } | 
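
The pushed trampoline address is load-bearing here: V8's stack walker treats a frame as interpreted when its return address points into the InterpreterEntryTrampoline builtin. A conceptual sketch of that check, using a hypothetical helper (the real walker lives in src/frames.cc and is more involved):

    // Conceptual sketch only; the helper name and shape are illustrative.
    bool IsInterpretedFramePC(Address return_address, Code* trampoline) {
      Address start = trampoline->instruction_start();
      // A return address inside the trampoline marks the frame as interpreted.
      return return_address >= start &&
             return_address < start + trampoline->instruction_size();
    }
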
|   904  |   904  | 
|   905  |   905  | 
|   906 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { |   906 void Builtins::Generate_CompileLazy(MacroAssembler* masm) { | 
 |   907   // ----------- S t a t e ------------- | 
 |   908   //  -- rax : argument count (preserved for callee) | 
 |   909   //  -- rdx : new target (preserved for callee) | 
 |   910   //  -- rdi : target function (preserved for callee) | 
 |   911   // ----------------------------------- | 
 |   912   // First lookup code, maybe we don't need to compile! | 
 |   913   Label gotta_call_runtime; | 
 |   914   Label maybe_call_runtime; | 
 |   915   Label try_shared; | 
 |   916   Label loop_top, loop_bottom; | 
 |   917  | 
 |   918   Register closure = rdi; | 
 |   919   Register map = r8; | 
 |   920   Register index = r9; | 
 |   921   __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset)); | 
 |   922   __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset)); | 
 |   923   __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset)); | 
 |   924   __ cmpl(index, Immediate(2)); | 
 |   925   __ j(less, &gotta_call_runtime); | 
 |   926  | 
 |   927   // Find literals. | 
 |   928   // r14 : native context | 
 |   929   // r9  : length / index | 
 |   930   // r8  : optimized code map | 
 |   931   // rdx : new target | 
 |   932   // rdi : closure | 
 |   933   Register native_context = r14; | 
 |   934   __ movp(native_context, NativeContextOperand()); | 
 |   935  | 
 |   936   __ bind(&loop_top); | 
 |   937   // Native context match? | 
 |   938   Register temp = r11; | 
 |   939   __ movp(temp, FieldOperand(map, index, times_pointer_size, | 
 |   940                              SharedFunctionInfo::OffsetToPreviousContext())); | 
 |   941   __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset)); | 
 |   942   __ cmpp(temp, native_context); | 
 |   943   __ j(not_equal, &loop_bottom); | 
 |   944   // OSR id set to none? | 
 |   945   __ movp(temp, FieldOperand(map, index, times_pointer_size, | 
 |   946                              SharedFunctionInfo::OffsetToPreviousOsrAstId())); | 
 |   947   __ SmiToInteger32(temp, temp); | 
 |   948   const int bailout_id = BailoutId::None().ToInt(); | 
 |   949   __ cmpl(temp, Immediate(bailout_id)); | 
 |   950   __ j(not_equal, &loop_bottom); | 
 |   951   // Literals available? | 
 |   952   __ movp(temp, FieldOperand(map, index, times_pointer_size, | 
 |   953                              SharedFunctionInfo::OffsetToPreviousLiterals())); | 
 |   954   __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset)); | 
 |   955   __ JumpIfSmi(temp, &gotta_call_runtime); | 
 |   956  | 
 |   957   // Save the literals in the closure. | 
 |   958   __ movp(FieldOperand(closure, JSFunction::kLiteralsOffset), temp); | 
 |   959   __ movp(r15, index); | 
 |   960   __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, r15, | 
 |   961                       kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 
 |   962  | 
 |   963   // Code available? | 
 |   964   Register entry = rcx; | 
 |   965   __ movp(entry, | 
 |   966           FieldOperand(map, index, times_pointer_size, | 
 |   967                        SharedFunctionInfo::OffsetToPreviousCachedCode())); | 
 |   968   __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset)); | 
 |   969   __ JumpIfSmi(entry, &maybe_call_runtime); | 
 |   970  | 
 |   971   // Found literals and code. Get them into the closure and return. | 
 |   972   __ leap(entry, FieldOperand(entry, Code::kHeaderSize)); | 
 |   973  | 
 |   974   Label install_optimized_code_and_tailcall; | 
 |   975   __ bind(&install_optimized_code_and_tailcall); | 
 |   976   __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry); | 
 |   977   __ RecordWriteCodeEntryField(closure, entry, r15); | 
 |   978  | 
 |   979   // Link the closure into the optimized function list. | 
 |   980   // rcx : code entry (entry) | 
 |   981   // r14 : native context | 
 |   982   // rdx : new target | 
 |   983   // rdi : closure | 
 |   984   __ movp(rbx, | 
 |   985           ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST)); | 
 |   986   __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx); | 
 |   987   __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, r15, | 
 |   988                       kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK); | 
 |   989   const int function_list_offset = | 
 |   990       Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST); | 
 |   991   __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST), | 
 |   992           closure); | 
 |   993   // Save closure before the write barrier. | 
 |   994   __ movp(rbx, closure); | 
 |   995   __ RecordWriteContextSlot(native_context, function_list_offset, closure, r15, | 
 |   996                             kDontSaveFPRegs); | 
 |   997   __ movp(closure, rbx); | 
 |   998   __ jmp(entry); | 
 |   999  | 
 |  1000   __ bind(&loop_bottom); | 
 |  1001   __ subl(index, Immediate(SharedFunctionInfo::kEntryLength)); | 
 |  1002   __ cmpl(index, Immediate(1)); | 
 |  1003   __ j(greater, &loop_top); | 
 |  1004  | 
 |  1005   // We found neither literals nor code. | 
 |  1006   __ jmp(&gotta_call_runtime); | 
 |  1007  | 
 |  1008   __ bind(&maybe_call_runtime); | 
 |  1009  | 
 |  1010   // Last possibility. Check the context free optimized code map entry. | 
 |  1011   __ movp(entry, FieldOperand(map, FixedArray::kHeaderSize + | 
 |  1012                                        SharedFunctionInfo::kSharedCodeIndex)); | 
 |  1013   __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset)); | 
 |  1014   __ JumpIfSmi(entry, &try_shared); | 
 |  1015  | 
 |  1016   // Store code entry in the closure. | 
 |  1017   __ leap(entry, FieldOperand(entry, Code::kHeaderSize)); | 
 |  1018   __ jmp(&install_optimized_code_and_tailcall); | 
 |  1019  | 
 |  1020   __ bind(&try_shared); | 
 |  1021   // Is the full code valid? | 
 |  1022   __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset)); | 
 |  1023   __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset)); | 
 |  1024   __ movl(rbx, FieldOperand(entry, Code::kFlagsOffset)); | 
 |  1025   __ andl(rbx, Immediate(Code::KindField::kMask)); | 
 |  1026   __ shrl(rbx, Immediate(Code::KindField::kShift)); | 
 |  1027   __ cmpl(rbx, Immediate(Code::BUILTIN)); | 
 |  1028   __ j(equal, &gotta_call_runtime); | 
 |  1029   // Yes, install the full code. | 
 |  1030   __ leap(entry, FieldOperand(entry, Code::kHeaderSize)); | 
 |  1031   __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry); | 
 |  1032   __ RecordWriteCodeEntryField(closure, entry, r15); | 
 |  1033   __ jmp(entry); | 
 |  1034  | 
 |  1035   __ bind(&gotta_call_runtime); | 
|   907   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); |  1036   GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy); | 
|   908 } |  1037 } | 
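
Taken together, the new Generate_CompileLazy body is a fast path that searches the SharedFunctionInfo's optimized code map for an entry matching the current native context before falling back to the runtime. A rough pseudocode rendering of the control flow above, keeping the assembly's label names; WeakCellValue, the *Slot accessors, InstallAndJump, and TailCallRuntime are paraphrased helpers, not the V8 API:

    // Pseudocode mirror of the assembly above; helper names are illustrative.
    void CompileLazyFastPath(JSFunction* fn, Context* native_context) {
      FixedArray* map = fn->shared()->optimized_code_map();
      if (map->length() < 2) goto gotta_call_runtime;
      // Walk entries from the end of the map, kEntryLength slots at a time.
      for (int i = map->length(); i > 1; i -= SharedFunctionInfo::kEntryLength) {
        if (WeakCellValue(ContextSlot(map, i)) != native_context) continue;
        if (OsrAstIdSlot(map, i) != BailoutId::None().ToInt()) continue;
        Object* literals = WeakCellValue(LiteralsSlot(map, i));
        if (literals->IsSmi()) goto gotta_call_runtime;  // cell was cleared
        fn->set_literals(literals);                      // with write barrier
        Object* code = WeakCellValue(CachedCodeSlot(map, i));
        if (!code->IsSmi())
          return InstallAndJump(fn, code, /*link_function_list=*/true);
        // maybe_call_runtime: code cell cleared; try the context-free entry.
        code = WeakCellValue(SharedCodeSlot(map));
        if (!code->IsSmi())
          return InstallAndJump(fn, code, /*link_function_list=*/true);
        goto try_shared;
      }
      goto gotta_call_runtime;  // found neither literals nor code
    try_shared:
      // Install SharedFunctionInfo::code unless it is still the lazy-compile
      // builtin (kind BUILTIN), which would just re-enter this stub.
      if (fn->shared()->code()->kind() == Code::BUILTIN) goto gotta_call_runtime;
      return InstallAndJump(fn, fn->shared()->code(),
                            /*link_function_list=*/false);
    gotta_call_runtime:
      TailCallRuntime(Runtime::kCompileLazy);
    }

Note the asymmetry the assembly preserves: both optimized-code paths link the closure into the native context's OPTIMIZED_FUNCTIONS_LIST before tail-calling, while the try_shared path installs the code entry without that link.
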
|   909  |  1038  | 
|   910  |  1039  | 
|   911 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { |  1040 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) { | 
|   912   GenerateTailCallToReturnedCode(masm, |  1041   GenerateTailCallToReturnedCode(masm, | 
|   913                                  Runtime::kCompileOptimized_NotConcurrent); |  1042                                  Runtime::kCompileOptimized_NotConcurrent); | 
|   914 } |  1043 } | 
|   915  |  1044  | 
|   916  |  1045  | 
| (...skipping 1874 matching lines...) | 
|  2791   __ ret(0); |  2920   __ ret(0); | 
|  2792 } |  2921 } | 
|  2793  |  2922  | 
|  2794  |  2923  | 
|  2795 #undef __ |  2924 #undef __ | 
|  2796  |  2925  | 
|  2797 }  // namespace internal |  2926 }  // namespace internal | 
|  2798 }  // namespace v8 |  2927 }  // namespace v8 | 
|  2799  |  2928  | 
|  2800 #endif  // V8_TARGET_ARCH_X64 |  2929 #endif  // V8_TARGET_ARCH_X64 | 