Index: src/builtins/arm64/builtins-arm64.cc
diff --git a/src/arm64/builtins-arm64.cc b/src/builtins/arm64/builtins-arm64.cc
similarity index 98%
rename from src/arm64/builtins-arm64.cc
rename to src/builtins/arm64/builtins-arm64.cc
index 946ae2814af91ae5cb21b66868da0b1a1d9de7fa..12512cedbbefb938d847f876d2b8922dc959f57c 100644
--- a/src/arm64/builtins-arm64.cc
+++ b/src/builtins/arm64/builtins-arm64.cc
@@ -14,17 +14,14 @@
namespace v8 {
namespace internal {
-
#define __ ACCESS_MASM(masm)
-
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
// Load the InternalArray function from the native context.
__ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}
-
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
Register result) {
@@ -65,7 +62,6 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
exit_frame_type == BUILTIN_EXIT);
}
-
void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
@@ -93,7 +89,6 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
__ TailCallStub(&stub);
}
-
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
@@ -122,7 +117,6 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
__ TailCallStub(&stub);
}
-
// static
void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
// ----------- S t a t e -------------
@@ -259,7 +253,6 @@ void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
__ Ret();
}
-
// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
// ----------- S t a t e -------------
@@ -341,7 +334,6 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
}
}
-
// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
// ----------- S t a t e -------------
@@ -413,7 +405,6 @@ void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
}
}
-
// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
// ----------- S t a t e -------------
@@ -530,7 +521,6 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
__ Br(x2);
}
-
void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
// Checking whether the queued function is ready for install is optional,
// since we come across interrupts and stack checks elsewhere. However, not
@@ -547,7 +537,6 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
GenerateTailCallToSharedCode(masm);
}
-
static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function,
bool create_implicit_receiver,
@@ -716,28 +705,23 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ Ret();
}
-
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, true, false);
}
-
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true, false, false);
}
-
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, false, false);
}
-
void Builtins::Generate_JSBuiltinsConstructStubForDerived(
MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false, false, true);
}
-
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(x1);
@@ -818,8 +802,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// New-style (ignition/turbofan) generator object
{
__ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
- __ Ldr(w0,
- FieldMemOperand(x0, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ Ldr(w0, FieldMemOperand(
+ x0, SharedFunctionInfo::kFormalParameterCountOffset));
// We abuse new.target both to indicate that this is a resume call and to
// pass in the generator object. In ordinary calls, new.target is always
// undefined because generator functions are non-constructable.
@@ -863,8 +847,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
__ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(x10, FieldMemOperand(x10, SharedFunctionInfo::kCodeOffset));
__ Add(x10, x10, Code::kHeaderSize - kHeapObjectTag);
- __ Ldrsw(x11,
- UntagSmiFieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
+ __ Ldrsw(x11, UntagSmiFieldMemOperand(
+ x1, JSGeneratorObject::kContinuationOffset));
__ Add(x10, x10, x11);
__ Mov(x12, Smi::FromInt(JSGeneratorObject::kGeneratorExecuting));
__ Str(x12, FieldMemOperand(x1, JSGeneratorObject::kContinuationOffset));
@@ -895,7 +879,6 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
-
// Clobbers x10, x15; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
IsTagged argc_is_tagged) {
@@ -925,7 +908,6 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
__ Bind(&enough_stack_space);
}
-
// Input:
// x0: new.target.
// x1: function.
@@ -975,7 +957,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ Bind(&loop);
__ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
__ Ldr(x12, MemOperand(x11)); // Dereference the handle.
- __ Push(x12); // Push the argument.
+ __ Push(x12); // Push the argument.
__ Bind(&entry);
__ Cmp(scratch, argv);
__ B(ne, &loop);
@@ -1015,12 +997,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ Ret();
}
-
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
Generate_JSEntryTrampolineHelper(masm, false);
}
-
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
Generate_JSEntryTrampolineHelper(masm, true);
}
@@ -1471,7 +1451,6 @@ void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
Runtime::kCompileOptimized_NotConcurrent);
}
-
void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
}
@@ -1542,19 +1521,18 @@ static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
__ Br(x0);
}
-#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
-void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
- MacroAssembler* masm) { \
- GenerateMakeCodeYoungAgainCommon(masm); \
-} \
-void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
- MacroAssembler* masm) { \
- GenerateMakeCodeYoungAgainCommon(masm); \
-}
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
+ void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
+ MacroAssembler* masm) { \
+ GenerateMakeCodeYoungAgainCommon(masm); \
+ } \
+ void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
+ MacroAssembler* masm) { \
+ GenerateMakeCodeYoungAgainCommon(masm); \
+ }
CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
#undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
-
void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
// For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
// that make_code_young doesn't do any garbage collection which allows us to
@@ -1574,8 +1552,8 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
__ Push(x0, x1, x3, fp, lr);
__ Mov(x1, ExternalReference::isolate_address(masm->isolate()));
__ CallCFunction(
- ExternalReference::get_mark_code_as_executed_function(
- masm->isolate()), 2);
+ ExternalReference::get_mark_code_as_executed_function(masm->isolate()),
+ 2);
__ Pop(lr, fp, x3, x1, x0);
// Perform prologue operations usually performed by the young code stub.
@@ -1587,17 +1565,14 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
__ Br(x0);
}
-
void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
GenerateMakeCodeYoungAgainCommon(masm);
}
-
void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
Generate_MarkCodeAsExecutedOnce(masm);
}
-
static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
SaveFPRegsMode save_doubles) {
{
@@ -1623,17 +1598,14 @@ static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
__ Br(lr);
}
-
void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
}
-
void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
}
-
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
{
@@ -1671,22 +1643,18 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
__ Abort(kInvalidFullCodegenState);
}
-
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
-
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
-
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
-
static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
Register function_template_info,
Register scratch0, Register scratch1,
@@ -1750,7 +1718,6 @@ static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
__ Bind(&receiver_check_passed);
}
-
void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments excluding receiver
@@ -1786,7 +1753,6 @@ void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kThrowIllegalInvocation);
}
-
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
// Lookup the function in the JavaScript frame.
__ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
@@ -1810,8 +1776,9 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
// Load the OSR entrypoint offset from the deoptimization data.
// <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
- __ Ldrsw(w1, UntagSmiFieldMemOperand(x1, FixedArray::OffsetOfElementAt(
- DeoptimizationInputData::kOsrPcOffsetIndex)));
+ __ Ldrsw(w1, UntagSmiFieldMemOperand(
+ x1, FixedArray::OffsetOfElementAt(
+ DeoptimizationInputData::kOsrPcOffsetIndex)));
// Compute the target address = code_obj + header_size + osr_offset
// <entry_addr> = <code_obj> + #header_size + <osr_offset>
@@ -1822,7 +1789,6 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
__ Ret();
}
-
// static
void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
int field_index) {
@@ -1964,7 +1930,6 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
}
}
-
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
Register argc = x0;
@@ -2011,7 +1976,6 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
-
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argc
@@ -2082,7 +2046,6 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
}
}
-
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argc
@@ -2169,7 +2132,6 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
}
}
-
static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
Label* stack_overflow) {
// ----------- S t a t e -------------
@@ -2191,7 +2153,6 @@ static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
__ B(le, stack_overflow);
}
-
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ SmiTag(x10, x0);
__ Mov(x11, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
@@ -2201,7 +2162,6 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize);
}
-
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : result being passed through
@@ -2216,7 +2176,6 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
__ Drop(1);
}
-
// static
void Builtins::Generate_Apply(MacroAssembler* masm) {
// ----------- S t a t e -------------
@@ -2557,7 +2516,6 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
}
}
-
namespace {
void Generate_PushBoundArguments(MacroAssembler* masm) {
@@ -2635,7 +2593,6 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
} // namespace
-
// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
TailCallMode tail_call_mode) {
@@ -2665,7 +2622,6 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
__ Br(x12);
}
-
// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
TailCallMode tail_call_mode) {
@@ -2725,7 +2681,6 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
}
}
-
// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
// ----------- S t a t e -------------
@@ -2747,7 +2702,6 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
__ Br(x4);
}
-
// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
// ----------- S t a t e -------------
@@ -2778,7 +2732,6 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
__ Br(x12);
}
-
// static
void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
// ----------- S t a t e -------------
@@ -2798,7 +2751,6 @@ void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
}
-
// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
// ----------- S t a t e -------------
@@ -2961,7 +2913,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// -- x3 : new target (passed through to callee)
// -----------------------------------
- Register argc_actual = x0; // Excluding the receiver.
+ Register argc_actual = x0; // Excluding the receiver.
Register argc_expected = x2; // Excluding the receiver.
Register function = x1;
Register code_entry = x10;
@@ -3002,9 +2954,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label copy_2_by_2;
__ Bind(&copy_2_by_2);
__ Ldp(scratch1, scratch2,
- MemOperand(copy_start, - 2 * kPointerSize, PreIndex));
+ MemOperand(copy_start, -2 * kPointerSize, PreIndex));
__ Stp(scratch1, scratch2,
- MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
+ MemOperand(copy_to, -2 * kPointerSize, PreIndex));
__ Cmp(copy_start, copy_end);
__ B(hi, &copy_2_by_2);
@@ -3032,7 +2984,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ Add(copy_from, fp, 3 * kPointerSize);
__ Add(copy_from, copy_from, argc_actual);
__ Mov(copy_to, jssp);
- __ Sub(copy_end, copy_to, 1 * kPointerSize); // Adjust for the receiver.
+ __ Sub(copy_end, copy_to, 1 * kPointerSize); // Adjust for the receiver.
__ Sub(copy_end, copy_end, argc_actual);
// Claim space for the arguments, the receiver, and one extra slot.
@@ -3045,9 +2997,9 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label copy_2_by_2;
__ Bind(&copy_2_by_2);
__ Ldp(scratch1, scratch2,
- MemOperand(copy_from, - 2 * kPointerSize, PreIndex));
+ MemOperand(copy_from, -2 * kPointerSize, PreIndex));
__ Stp(scratch1, scratch2,
- MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
+ MemOperand(copy_to, -2 * kPointerSize, PreIndex));
__ Cmp(copy_to, copy_end);
__ B(hi, &copy_2_by_2);
@@ -3060,7 +3012,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
Label fill;
__ Bind(&fill);
__ Stp(scratch1, scratch1,
- MemOperand(copy_to, - 2 * kPointerSize, PreIndex));
+ MemOperand(copy_to, -2 * kPointerSize, PreIndex));
__ Cmp(copy_to, copy_end);
__ B(hi, &fill);
@@ -3097,7 +3049,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
}
}
-
#undef __
} // namespace internal