Index: src/x64/builtins-x64.cc
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index 89e8078c2035f629adc2d70f5661da363461248f..a4851db55809eee9c48f770233a0c7c5d0d47c9d 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -913,6 +913,132 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
+  // ----------- S t a t e -------------
+  // -- rdx : new target (preserved for callee)
+  // -- rdi : target function (preserved for callee)
+  // -----------------------------------
+  // First lookup code, maybe we don't need to compile!
+  Label gotta_call_runtime;
+  Label maybe_call_runtime;
+  Label try_shared;
+  Label loop_top, loop_bottom;
+
+  Register closure = rdi;
+  Register map = r8;
+  Register index = r9;
+  __ movp(map, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
+  __ movp(map, FieldOperand(map, SharedFunctionInfo::kOptimizedCodeMapOffset));
+  __ SmiToInteger32(index, FieldOperand(map, FixedArray::kLengthOffset));
+  __ cmpl(index, Immediate(2));
+  __ j(less, &gotta_call_runtime);
+
+  // Find literals.
+  // r14 : native context
+  // r9 : length / index
+  // r8 : optimized code map
+  // rdx : new target
+  // rdi : closure
+  Register native_context = r14;
+  __ movp(native_context, NativeContextOperand());
+
+  __ bind(&loop_top);
+  // Native context match?
+  Register temp = r11;
+  __ movp(temp, FieldOperand(map, index, times_pointer_size,
+                             SharedFunctionInfo::OffsetToPreviousContext()));
+  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
+  __ cmpp(temp, native_context);
+  __ j(not_equal, &loop_bottom);
+  // OSR id set to none?
+  __ movp(temp, FieldOperand(map, index, times_pointer_size,
+                             SharedFunctionInfo::OffsetToPreviousOsrAstId()));
+  __ SmiToInteger32(temp, temp);
+  const int bailout_id = BailoutId::None().ToInt();
+  __ cmpl(temp, Immediate(bailout_id));
+  __ j(not_equal, &loop_bottom);
+  // Literals available?
+  __ movp(temp, FieldOperand(map, index, times_pointer_size,
+                             SharedFunctionInfo::OffsetToPreviousLiterals()));
+  __ movp(temp, FieldOperand(temp, WeakCell::kValueOffset));
+  __ JumpIfSmi(temp, &gotta_call_runtime);
+
+  // Save the literals in the closure.
+  __ movp(FieldOperand(closure, JSFunction::kLiteralsOffset), temp);
+  __ movp(rax, index);
+  __ RecordWriteField(closure, JSFunction::kLiteralsOffset, temp, rax,
+                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+
+  // Code available?
+  Register entry = rcx;
+  __ movp(entry,
+          FieldOperand(map, index, times_pointer_size,
+                       SharedFunctionInfo::OffsetToPreviousCachedCode()));
+  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
+  __ JumpIfSmi(entry, &maybe_call_runtime);
+
+  // Found literals and code. Get them into the closure and return.
+  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
+
+  Label install_optimized_code_and_tailcall;
+  __ bind(&install_optimized_code_and_tailcall);
+  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
+
+  // Link the closure into the optimized function list.
+  // rcx : code entry (entry)
+  // r14 : native context
+  // rdx : new target
+  // rdi : closure
+  __ movp(rbx,
+          ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
+  __ movp(FieldOperand(closure, JSFunction::kNextFunctionLinkOffset), rbx);
+  __ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, rbx, rax,
+                      kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
+  const int function_list_offset =
+      Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
+  __ movp(ContextOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST),
+          closure);
+  // Save closure before the write barrier.
+  __ movp(rbx, closure);
+  __ RecordWriteContextSlot(native_context, function_list_offset, closure, rax,
+                            kDontSaveFPRegs);
+  __ movp(closure, rbx);
+  __ jmp(entry);
+
+  __ bind(&loop_bottom);
+  __ subl(index, Immediate(SharedFunctionInfo::kEntryLength));
+  __ cmpl(index, Immediate(1));
+  __ j(greater, &loop_top);
+
+  // We found neither literals nor code.
+  __ jmp(&gotta_call_runtime);
+
+  __ bind(&maybe_call_runtime);
+
+  // Last possibility. Check the context free optimized code map entry.
+  __ movp(entry, FieldOperand(map, FixedArray::kHeaderSize +
+                                       SharedFunctionInfo::kSharedCodeIndex));
+  __ movp(entry, FieldOperand(entry, WeakCell::kValueOffset));
+  __ JumpIfSmi(entry, &try_shared);
+
+  // Store code entry in the closure.
+  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
+  __ jmp(&install_optimized_code_and_tailcall);
+
+  __ bind(&try_shared);
+  // Is the full code valid?
+  __ movp(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
+  __ movp(entry, FieldOperand(entry, SharedFunctionInfo::kCodeOffset));
+  __ movl(rbx, FieldOperand(entry, Code::kFlagsOffset));
+  __ andl(rbx, Immediate(Code::KindField::kMask));
+  __ shrl(rbx, Immediate(Code::KindField::kShift));
+  __ cmpl(rbx, Immediate(Code::BUILTIN));
+  __ j(equal, &gotta_call_runtime);
+  // Yes, install the full code.
+  __ leap(entry, FieldOperand(entry, Code::kHeaderSize));
+  __ movp(FieldOperand(closure, JSFunction::kCodeEntryOffset), entry);
+  __ jmp(entry);
+
+  __ bind(&gotta_call_runtime);
   CallRuntimePassFunction(masm, Runtime::kCompileLazy);
   GenerateTailCallToReturnedCode(masm);
 }