Index: src/arm64/builtins-arm64.cc
diff --git a/src/arm64/builtins-arm64.cc b/src/arm64/builtins-arm64.cc
index 87ae660cfb4fa915917ca2b6964127890233beb6..01ac4cc5db6a3584434998612a589661e61e12d8 100644
--- a/src/arm64/builtins-arm64.cc
+++ b/src/arm64/builtins-arm64.cc
@@ -317,7 +317,7 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   __ CompareRoot(masm->StackPointer(), Heap::kStackLimitRootIndex);
   __ B(hs, &ok);
 
-  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
+  CallRuntimePassFunction(masm, Runtime::kHiddenTryInstallOptimizedCode);
   GenerateTailCallToReturnedCode(masm);
 
   __ Bind(&ok);
@@ -407,7 +407,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
         // as argument to the runtime call.
         __ Push(constructor, init_map, constructor);
         // The call will replace the stub, so the countdown is only done once.
-        __ CallRuntime(Runtime::kFinalizeInstanceSize, 1);
+        __ CallRuntime(Runtime::kHiddenFinalizeInstanceSize, 1);
         __ Pop(init_map, constructor);
         __ Bind(&allocate);
       }
@@ -566,7 +566,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       __ Peek(x4, 2 * kXRegSize);
       __ Push(x4);
       __ Push(constructor);  // Argument for Runtime_NewObject.
-      __ CallRuntime(Runtime::kNewObjectWithAllocationSite, 2);
+      __ CallRuntime(Runtime::kHiddenNewObjectWithAllocationSite, 2);
       __ Mov(x4, x0);
       // If we ended up using the runtime, and we want a memento, then the
       // runtime call made it for us, and we shouldn't do create count
@@ -574,7 +574,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
       __ jmp(&count_incremented);
     } else {
       __ Push(constructor);  // Argument for Runtime_NewObject.
-      __ CallRuntime(Runtime::kNewObject, 1);
+      __ CallRuntime(Runtime::kHiddenNewObject, 1);
      __ Mov(x4, x0);
     }
 
@@ -811,7 +811,7 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
 
 
 void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
+  CallRuntimePassFunction(masm, Runtime::kHiddenCompileUnoptimized);
   GenerateTailCallToReturnedCode(masm);
 }
 
@@ -821,11 +821,11 @@ static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
   Register function = x1;
 
   // Preserve function. At the same time, push arguments for
-  // kCompileOptimized.
+  // kHiddenCompileOptimized.
   __ LoadObject(x10, masm->isolate()->factory()->ToBoolean(concurrent));
   __ Push(function, function, x10);
 
-  __ CallRuntime(Runtime::kCompileOptimized, 2);
+  __ CallRuntime(Runtime::kHiddenCompileOptimized, 2);
 
   // Restore receiver.
   __ Pop(function);
@@ -935,7 +935,7 @@ static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
     // preserve the registers with parameters.
     __ PushXRegList(kSafepointSavedRegisters);
     // Pass the function and deoptimization type to the runtime system.
-    __ CallRuntime(Runtime::kNotifyStubFailure, 0, save_doubles);
+    __ CallRuntime(Runtime::kHiddenNotifyStubFailure, 0, save_doubles);
     __ PopXRegList(kSafepointSavedRegisters);
   }
 
@@ -965,7 +965,7 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
     // Pass the deoptimization type to the runtime system.
     __ Mov(x0, Smi::FromInt(static_cast<int>(type)));
     __ Push(x0);
-    __ CallRuntime(Runtime::kNotifyDeoptimized, 1);
+    __ CallRuntime(Runtime::kHiddenNotifyDeoptimized, 1);
   }
 
   // Get the full codegen state from the stack and untag it.
@@ -1050,7 +1050,7 @@ void Builtins::Generate_OsrAfterStackCheck(MacroAssembler* masm) {
   __ B(hs, &ok);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
-    __ CallRuntime(Runtime::kStackGuard, 0);
+    __ CallRuntime(Runtime::kHiddenStackGuard, 0);
   }
   __ Jump(masm->isolate()->builtins()->OnStackReplacement(),
           RelocInfo::CODE_TARGET);