Index: src/builtins/mips64/builtins-mips64.cc
diff --git a/src/mips64/builtins-mips64.cc b/src/builtins/mips64/builtins-mips64.cc
similarity index 98%
rename from src/mips64/builtins-mips64.cc
rename to src/builtins/mips64/builtins-mips64.cc
index 73cece633f9d3f7fa0a128d280bb5d773942ee54..9680dbdbf56dab47131cfb1fb978f9da39cbb737 100644
--- a/src/mips64/builtins-mips64.cc
+++ b/src/builtins/mips64/builtins-mips64.cc
@@ -13,7 +13,6 @@
 namespace v8 {
 namespace internal {
-
 #define __ ACCESS_MASM(masm)
 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
@@ -49,7 +48,6 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                              exit_frame_type == BUILTIN_EXIT);
 }
-
 // Load the built-in InternalArray function from the current context.
 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                               Register result) {
@@ -57,14 +55,12 @@ static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
   __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
 }
-
 // Load the built-in Array function from the current context.
 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
   // Load the Array function from the native context.
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
 }
-
 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : number of arguments
@@ -80,11 +76,11 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
     // Initial map for the builtin InternalArray functions should be maps.
     __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
     __ SmiTst(a2, a4);
-    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
-              a4, Operand(zero_reg));
+    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, a4,
+              Operand(zero_reg));
     __ GetObjectType(a2, a3, a4);
-    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
-              a4, Operand(MAP_TYPE));
+    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, a4,
+              Operand(MAP_TYPE));
   }
   // Run the native code for the InternalArray function called as a normal
@@ -94,7 +90,6 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
   __ TailCallStub(&stub);
 }
-
 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : number of arguments
@@ -110,11 +105,11 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
     // Initial map for the builtin Array functions should be maps.
     __ ld(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
     __ SmiTst(a2, a4);
-    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
-              a4, Operand(zero_reg));
+    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, a4,
+              Operand(zero_reg));
     __ GetObjectType(a2, a3, a4);
-    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
-              a4, Operand(MAP_TYPE));
+    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, a4,
+              Operand(MAP_TYPE));
   }
   // Run the native code for the Array function called as a normal function.
@@ -125,7 +120,6 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   __ TailCallStub(&stub);
 }
-
 // static
 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
   // ----------- S t a t e -------------
@@ -274,7 +268,6 @@ void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
   __ DropAndRet(1);
 }
-
 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : number of arguments
@@ -354,7 +347,6 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
   }
 }
-
 // static
 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -427,7 +419,6 @@ void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
   }
 }
-
 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : number of arguments
@@ -541,7 +532,6 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
   __ Jump(at);
 }
-
 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   // Checking whether the queued function is ready for install is optional,
   // since we come across interrupts and stack checks elsewhere. However,
@@ -558,7 +548,6 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   GenerateTailCallToSharedCode(masm);
 }
-
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
                                            bool create_implicit_receiver,
@@ -706,22 +695,18 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   __ Ret();
 }
-
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, false, true, false);
 }
-
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, true, false, false);
 }
-
 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, false, false, false);
 }
-
 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
     MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, false, false, true);
@@ -808,7 +793,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   {
     __ ld(a0, FieldMemOperand(a4, JSFunction::kSharedFunctionInfoOffset));
     __ lw(a0,
-         FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
+          FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
     // We abuse new.target both to indicate that this is a resume call and to
     // pass in the generator object. In ordinary calls, new.target is always
     // undefined because generator functions are non-constructable.
@@ -889,10 +874,8 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
-
 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
-
 // Clobbers a2; preserves all other registers.
 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                         IsTagged argc_is_tagged) {
@@ -919,7 +902,6 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   __ bind(&okay);
 }
-
 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                              bool is_construct) {
   // Called from JSEntryStub::GenerateBody
@@ -959,13 +941,13 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     Label loop, entry;
     __ Dlsa(a6, s0, a3, kPointerSizeLog2);
     __ b(&entry);
-    __ nop();   // Branch delay slot nop.
+    __ nop();  // Branch delay slot nop.
     // a6 points past last arg.
     __ bind(&loop);
     __ ld(a4, MemOperand(s0));  // Read next parameter.
     __ daddiu(s0, s0, kPointerSize);
     __ ld(a4, MemOperand(a4));  // Dereference handle.
-    __ push(a4);  // Push parameter.
+    __ push(a4);                // Push parameter.
     __ bind(&entry);
     __ Branch(&loop, ne, s0, Operand(a6));
@@ -995,12 +977,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   __ Jump(ra);
 }
-
 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
   Generate_JSEntryTrampolineHelper(masm, false);
 }
-
 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
   Generate_JSEntryTrampolineHelper(masm, true);
 }
@@ -1506,8 +1486,7 @@ static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   // crawls in MakeCodeYoung. This seems a bit fragile.
   // Set a0 to point to the head of the PlatformCodeAge sequence.
-  __ Dsubu(a0, a0,
-           Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
+  __ Dsubu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
   // The following registers must be saved and restored when calling through to
   // the runtime:
@@ -1526,19 +1505,18 @@ static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   __ Jump(a0);
 }
-#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                 \
-void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking(  \
-    MacroAssembler* masm) {                                  \
-  GenerateMakeCodeYoungAgainCommon(masm);                    \
-}                                                            \
-void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(   \
-    MacroAssembler* masm) {                                  \
-  GenerateMakeCodeYoungAgainCommon(masm);                    \
-}
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
+  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
+      MacroAssembler* masm) {                                 \
+    GenerateMakeCodeYoungAgainCommon(masm);                   \
+  }                                                           \
+  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
+      MacroAssembler* masm) {                                 \
+    GenerateMakeCodeYoungAgainCommon(masm);                   \
+  }
 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
-
 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
   // that make_code_young doesn't do any garbage collection which allows us to
@@ -1546,8 +1524,7 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   // pointers.
   // Set a0 to point to the head of the PlatformCodeAge sequence.
-  __ Dsubu(a0, a0,
-           Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
+  __ Dsubu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
   // The following registers must be saved and restored when calling through to
   // the runtime:
@@ -1573,17 +1550,14 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   __ Jump(a0);
 }
-
 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
   GenerateMakeCodeYoungAgainCommon(masm);
 }
-
 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
   Generate_MarkCodeAsExecutedOnce(masm);
 }
-
 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                              SaveFPRegsMode save_doubles) {
   {
@@ -1599,20 +1573,17 @@ static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
   }
   __ Daddu(sp, sp, Operand(kPointerSize));  // Ignore state
-  __ Jump(ra);  // Jump to miss handler
+  __ Jump(ra);                              // Jump to miss handler
 }
-
 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
 }
-
 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
 }
-
 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                              Deoptimizer::BailoutType type) {
   {
@@ -1650,22 +1621,18 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
   __ stop("no cases left");
 }
-
 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
 }
-
 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
 }
-
 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
 }
-
 // Clobbers {t2, t3, a4, a5}.
 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                     Register function_template_info,
@@ -1729,7 +1696,6 @@ static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
   __ bind(&receiver_check_passed);
 }
-
 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : number of arguments excluding receiver
@@ -1766,7 +1732,6 @@ void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
   __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
 }
-
 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   // Lookup the function in the JavaScript frame.
   __ ld(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
@@ -1787,7 +1752,8 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   // Load the OSR entrypoint offset from the deoptimization data.
   // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
   __ ld(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
-      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
+                               DeoptimizationInputData::kOsrPcOffsetIndex) -
+                               kHeapObjectTag));
   __ SmiUntag(a1);
   // Compute the target address = code_obj + header_size + osr_offset
@@ -1799,7 +1765,6 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   __ Ret();
 }
-
 // static
 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                                int field_index) {
@@ -1928,7 +1893,6 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   }
 }
-
 // static
 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   // 1. Make sure we have at least one argument.
@@ -1971,7 +1935,6 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
 }
-
 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : argc
@@ -2035,7 +1998,6 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   }
 }
-
 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : argc
@@ -2114,7 +2076,6 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   }
 }
-
 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
   // ----------- S t a t e -------------
@@ -2136,17 +2097,15 @@ static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
   __ Branch(stack_overflow, le, a5, Operand(at));
 }
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   // __ sll(a0, a0, kSmiTagSize);
   __ dsll32(a0, a0, 0);
   __ li(a4, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   __ MultiPush(a0.bit() | a1.bit() | a4.bit() | fp.bit() | ra.bit());
-  __ Daddu(fp, sp,
-      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
+  __ Daddu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
+                           kPointerSize));
 }
-
 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- v0 : result being passed through
@@ -2163,7 +2122,6 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ Daddu(sp, sp, Operand(kPointerSize));
 }
-
 // static
 void Builtins::Generate_Apply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -2485,7 +2443,6 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
   }
 }
-
 // static
 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                               TailCallMode tail_call_mode) {
@@ -2580,7 +2537,6 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
   __ Jump(at);
 }
-
 // static
 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                              TailCallMode tail_call_mode) {
@@ -2640,7 +2596,6 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
   }
 }
-
 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : the number of arguments (not including the receiver)
@@ -2661,7 +2616,6 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   __ Jump(at);
 }
-
 // static
 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -2753,7 +2707,6 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
   __ Jump(at);
 }
-
 // static
 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -2772,7 +2725,6 @@ void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
       ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
 }
-
 // static
 void Builtins::Generate_Construct(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -2938,8 +2890,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   Label invoke, dont_adapt_arguments, stack_overflow;
   Label enough, too_few;
-  __ Branch(&dont_adapt_arguments, eq,
-      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
+  __ Branch(&dont_adapt_arguments, eq, a2,
+            Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
   // We use Uless as the number of argument should always be greater than 0.
   __ Branch(&too_few, Uless, a0, Operand(a2));
@@ -3043,7 +2995,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   LeaveArgumentsAdaptorFrame(masm);
   __ Ret();
-
   // -------------------------------------------
   // Don't adapt arguments.
   // -------------------------------------------
@@ -3059,7 +3010,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   }
 }
-
 #undef __
 }  // namespace internal