Index: src/x64/builtins-x64.cc
diff --git a/src/x64/builtins-x64.cc b/src/x64/builtins-x64.cc
index aef91640a01687f133f5dcd68770c416b278968b..d3328cb50f12f8d2ba782c9295c2db9bd14d8b4c 100644
--- a/src/x64/builtins-x64.cc
+++ b/src/x64/builtins-x64.cc
@@ -73,21 +73,25 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
 }
-static void CallRuntimePassFunction(MacroAssembler* masm,
-                                    Runtime::FunctionId function_id) {
-  FrameScope scope(masm, StackFrame::INTERNAL);
-  // Push a copy of the function onto the stack.
-  __ push(rdi);
-  // Push call kind information.
-  __ push(rcx);
-  // Function is also the parameter to the runtime call.
-  __ push(rdi);
+static void CallRuntimePassFunctionAndTailCall(
+    MacroAssembler* masm, Runtime::FunctionId function_id) {
+  { FrameScope scope(masm, StackFrame::INTERNAL);
+    // Push a copy of the function onto the stack.
+    __ push(rdi);
+    // Push call kind information.
+    __ push(rcx);
+    // Function is also the parameter to the runtime call.
+    __ push(rdi);
-  __ CallRuntime(function_id, 1);
-  // Restore call kind information.
-  __ pop(rcx);
-  // Restore receiver.
-  __ pop(rdi);
+    __ CallRuntime(function_id, 1);
+    // Restore call kind information.
+    __ pop(rcx);
+    // Restore receiver.
+    __ pop(rdi);
+  }
+  // Tail call to returned code.
+  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
+  __ jmp(rax);
 }
@@ -101,7 +105,7 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
 }
-void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   // Checking whether the queued function is ready for install is optional,
   // since we come across interrupts and stack checks elsewhere. However,
   // not checking may delay installing ready functions, and always checking
@@ -111,22 +115,13 @@ void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
   __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
   __ j(above_equal, &ok);
-  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
-  // Tail call to returned code.
-  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
-  __ jmp(rax);
+  CallRuntimePassFunctionAndTailCall(masm, Runtime::kTryInstallOptimizedCode);
   __ bind(&ok);
   GenerateTailCallToSharedCode(masm);
 }
-void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
-  GenerateTailCallToSharedCode(masm);
-}
-
-
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
                                            bool count_constructions) {
@@ -574,21 +569,44 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
 void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
-  // Do a tail-call of the compiled function.
-  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
-  __ jmp(rax);
+  CallRuntimePassFunctionAndTailCall(masm, Runtime::kCompileUnoptimized);
 }
-void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
-  // Do a tail-call of the compiled function.
+static void CallCompileOptimizedAndTailCall(MacroAssembler* masm,
+                                            bool concurrent) {
+  { FrameScope scope(masm, StackFrame::INTERNAL);
+    // Push a copy of the function onto the stack.
+    __ push(rdi);
+    // Push call kind information.
+    __ push(rcx);
+    // Function is also the parameter to the runtime call.
+    __ push(rdi);
+    // Whether to compile in a background thread.
+    __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
+
+    __ CallRuntime(Runtime::kCompileOptimized, 2);
+    // Restore call kind information.
+    __ pop(rcx);
+    // Restore receiver.
+    __ pop(rdi);
+  }
+  // Tail call to returned code.
   __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
   __ jmp(rax);
 }
+void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
+  CallCompileOptimizedAndTailCall(masm, false);
+}
+
+
+void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
+  CallCompileOptimizedAndTailCall(masm, true);
+}
+
+
 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   // For now, we are relying on the fact that make_code_young doesn't do any
   // garbage collection which allows us to save/restore the registers without