Index: src/arm64/builtins-arm64.cc
diff --git a/src/arm64/builtins-arm64.cc b/src/arm64/builtins-arm64.cc
index c596dc049e9e52e9751bc9249483ccbda4567d5e..4cf9be61cc0e284b0f872fc9a26e9b2576e9c7be 100644
--- a/src/arm64/builtins-arm64.cc
+++ b/src/arm64/builtins-arm64.cc
@@ -455,40 +455,40 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
   __ Ret();
 }
+static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
+  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
+  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
+  __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
+  __ Br(x2);
+}
-static void CallRuntimePassFunction(MacroAssembler* masm,
-                                    Runtime::FunctionId function_id) {
+static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
+                                           Runtime::FunctionId function_id) {
   // ----------- S t a t e -------------
+  //  -- x0 : argument count (preserved for callee)
   //  -- x1 : target function (preserved for callee)
   //  -- x3 : new target (preserved for callee)
   // -----------------------------------
+  {
+    FrameScope scope(masm, StackFrame::INTERNAL);
+    // Push a copy of the target function and the new target.
+    // Push another copy as a parameter to the runtime call.
+    __ SmiTag(x0);
+    __ Push(x0, x1, x3, x1);
-  FrameScope scope(masm, StackFrame::INTERNAL);
-  // Push a copy of the target function and the new target.
-  // Push another copy as a parameter to the runtime call.
-  __ Push(x1, x3, x1);
-
-  __ CallRuntime(function_id, 1);
-
-  // Restore target function and new target.
-  __ Pop(x3, x1);
-}
+    __ CallRuntime(function_id, 1);
+    __ Move(x2, x0);
+    // Restore target function and new target.
+    __ Pop(x3, x1, x0);
+    __ SmiUntag(x0);
+  }
-static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
-  __ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
-  __ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
   __ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
   __ Br(x2);
 }
-static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
-  __ Add(x0, x0, Code::kHeaderSize - kHeapObjectTag);
-  __ Br(x0);
-}
-
-
 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   // Checking whether the queued function is ready for install is optional,
   // since we come across interrupts and stack checks elsewhere. However, not
@@ -499,8 +499,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
   __ B(hs, &ok);
-  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
-  GenerateTailCallToReturnedCode(masm);
+  GenerateTailCallToReturnedCode(masm, Runtime::kTryInstallOptimizedCode);
   __ Bind(&ok);
   GenerateTailCallToSharedCode(masm);
@@ -1194,20 +1193,18 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
 void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kCompileLazy);
-  GenerateTailCallToReturnedCode(masm);
+  GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
 }
 void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_NotConcurrent);
-  GenerateTailCallToReturnedCode(masm);
+  GenerateTailCallToReturnedCode(masm,
+                                 Runtime::kCompileOptimized_NotConcurrent);
 }
 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kCompileOptimized_Concurrent);
-  GenerateTailCallToReturnedCode(masm);
+  GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
 }