Index: src/builtins/mips/builtins-mips.cc
diff --git a/src/mips/builtins-mips.cc b/src/builtins/mips/builtins-mips.cc
similarity index 98%
rename from src/mips/builtins-mips.cc
rename to src/builtins/mips/builtins-mips.cc
index 638098c6a7b48c3bdc91e5b1c64e214cb45a37b2..b8d91e01ee93dad828903e2d828d46e58c5e2671 100644
--- a/src/mips/builtins-mips.cc
+++ b/src/builtins/mips/builtins-mips.cc
@@ -10,11 +10,9 @@
 #include "src/full-codegen/full-codegen.h"
 #include "src/runtime/runtime.h"
 
-
 namespace v8 {
 namespace internal {
 
-
 #define __ ACCESS_MASM(masm)
 
 void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
@@ -50,7 +48,6 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id,
                              exit_frame_type == BUILTIN_EXIT);
 }
 
-
 // Load the built-in InternalArray function from the current context.
 static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
                                               Register result) {
@@ -58,14 +55,12 @@ static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
   __ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
 }
 
-
 // Load the built-in Array function from the current context.
 static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
   // Load the Array function from the native context.
   __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
 }
 
-
 void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : number of arguments
@@ -81,11 +76,11 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
     // Initial map for the builtin InternalArray functions should be maps.
     __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
     __ SmiTst(a2, t0);
-    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
-              t0, Operand(zero_reg));
+    __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction, t0,
+              Operand(zero_reg));
     __ GetObjectType(a2, a3, t0);
-    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
-              t0, Operand(MAP_TYPE));
+    __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction, t0,
+              Operand(MAP_TYPE));
   }
 
   // Run the native code for the InternalArray function called as a normal
@@ -95,7 +90,6 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
   __ TailCallStub(&stub);
 }
 
-
 void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : number of arguments
@@ -111,11 +105,11 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
     // Initial map for the builtin Array functions should be maps.
     __ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
     __ SmiTst(a2, t0);
-    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
-              t0, Operand(zero_reg));
+    __ Assert(ne, kUnexpectedInitialMapForArrayFunction1, t0,
+              Operand(zero_reg));
     __ GetObjectType(a2, a3, t0);
-    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
-              t0, Operand(MAP_TYPE));
+    __ Assert(eq, kUnexpectedInitialMapForArrayFunction2, t0,
+              Operand(MAP_TYPE));
   }
 
   // Run the native code for the Array function called as a normal function.
@@ -126,7 +120,6 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
   __ TailCallStub(&stub);
 }
 
-
 // static
 void Builtins::Generate_MathMaxMin(MacroAssembler* masm, MathMaxMinKind kind) {
   // ----------- S t a t e -------------
@@ -277,7 +270,6 @@ void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
   __ DropAndRet(1);
 }
 
-
 // static
 void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -357,7 +349,6 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
   }
 }
 
-
 // static
 void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -429,7 +420,6 @@ void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
   }
 }
 
-
 // static
 void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -544,7 +534,6 @@ static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
   __ Jump(at);
 }
 
-
 void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   // Checking whether the queued function is ready for install is optional,
   // since we come across interrupts and stack checks elsewhere. However,
@@ -561,7 +550,6 @@ void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   GenerateTailCallToSharedCode(masm);
 }
 
-
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
                                            bool create_implicit_receiver,
@@ -712,38 +700,31 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
   __ Ret();
 }
 
-
 void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, false, true, false);
 }
 
-
 void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, true, false, false);
 }
 
-
 void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, false, false, false);
 }
 
-
 void Builtins::Generate_JSBuiltinsConstructStubForDerived(
     MacroAssembler* masm) {
   Generate_JSConstructStubHelper(masm, false, false, true);
 }
 
-
 void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   FrameScope scope(masm, StackFrame::INTERNAL);
   __ Push(a1);
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
 
-
 enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
 
-
 // Clobbers a2; preserves all other registers.
 static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
                                         IsTagged argc_is_tagged) {
@@ -771,7 +752,6 @@ static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
   __ bind(&okay);
 }
 
-
 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                              bool is_construct) {
   // Called from JSEntryStub::GenerateBody
@@ -811,13 +791,13 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   Label loop, entry;
   __ Lsa(t2, s0, a3, kPointerSizeLog2);
   __ b(&entry);
-  __ nop(); // Branch delay slot nop.
+  __ nop();  // Branch delay slot nop.
   // t2 points past last arg.
   __ bind(&loop);
   __ lw(t0, MemOperand(s0));  // Read next parameter.
   __ addiu(s0, s0, kPointerSize);
   __ lw(t0, MemOperand(t0));  // Dereference handle.
-  __ push(t0); // Push parameter.
+  __ push(t0);  // Push parameter.
   __ bind(&entry);
   __ Branch(&loop, ne, s0, Operand(t2));
 
@@ -848,12 +828,10 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
   __ Jump(ra);
 }
 
-
 void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
   Generate_JSEntryTrampolineHelper(masm, false);
 }
 
-
 void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
   Generate_JSEntryTrampolineHelper(masm, true);
 }
@@ -939,7 +917,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   {
     __ lw(a0, FieldMemOperand(t0, JSFunction::kSharedFunctionInfoOffset));
     __ lw(a0,
-        FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
+          FieldMemOperand(a0, SharedFunctionInfo::kFormalParameterCountOffset));
     __ SmiUntag(a0);
     // We abuse new.target both to indicate that this is a resume call and to
     // pass in the generator object. In ordinary calls, new.target is always
@@ -1468,7 +1446,6 @@ void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
                                  Runtime::kCompileOptimized_NotConcurrent);
 }
 
-
 void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
   GenerateTailCallToReturnedCode(masm, Runtime::kCompileOptimized_Concurrent);
 }
@@ -1518,8 +1495,7 @@ static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   // crawls in MakeCodeYoung. This seems a bit fragile.
 
   // Set a0 to point to the head of the PlatformCodeAge sequence.
-  __ Subu(a0, a0,
-          Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
+  __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
 
   // The following registers must be saved and restored when calling through to
   // the runtime:
@@ -1538,19 +1514,18 @@ static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   __ Jump(a0);
 }
 
-#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C) \
-void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
-    MacroAssembler* masm) { \
-  GenerateMakeCodeYoungAgainCommon(masm); \
-} \
-void Builtins::Generate_Make##C##CodeYoungAgainOddMarking( \
-    MacroAssembler* masm) { \
-  GenerateMakeCodeYoungAgainCommon(masm); \
-}
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
+  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
+      MacroAssembler* masm) {                                 \
+    GenerateMakeCodeYoungAgainCommon(masm);                   \
+  }                                                           \
+  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
+      MacroAssembler* masm) {                                 \
+    GenerateMakeCodeYoungAgainCommon(masm);                   \
+  }
 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
 
-
 void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   // For now, as in GenerateMakeCodeYoungAgainCommon, we are relying on the fact
   // that make_code_young doesn't do any garbage collection which allows us to
@@ -1558,8 +1533,7 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   // pointers.
 
   // Set a0 to point to the head of the PlatformCodeAge sequence.
-  __ Subu(a0, a0,
-          Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
+  __ Subu(a0, a0, Operand(kNoCodeAgeSequenceLength - Assembler::kInstrSize));
 
   // The following registers must be saved and restored when calling through to
   // the runtime:
@@ -1585,17 +1559,14 @@ void Builtins::Generate_MarkCodeAsExecutedOnce(MacroAssembler* masm) {
   __ Jump(a0);
 }
 
-
 void Builtins::Generate_MarkCodeAsExecutedTwice(MacroAssembler* masm) {
   GenerateMakeCodeYoungAgainCommon(masm);
 }
 
-
 void Builtins::Generate_MarkCodeAsToBeExecutedOnce(MacroAssembler* masm) {
   Generate_MarkCodeAsExecutedOnce(masm);
 }
 
-
 static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
                                              SaveFPRegsMode save_doubles) {
   {
@@ -1611,20 +1582,17 @@ static void Generate_NotifyStubFailureHelper(MacroAssembler* masm,
   }
 
   __ Addu(sp, sp, Operand(kPointerSize));  // Ignore state
-  __ Jump(ra); // Jump to miss handler
+  __ Jump(ra);  // Jump to miss handler
 }
 
-
 void Builtins::Generate_NotifyStubFailure(MacroAssembler* masm) {
   Generate_NotifyStubFailureHelper(masm, kDontSaveFPRegs);
 }
 
-
 void Builtins::Generate_NotifyStubFailureSaveDoubles(MacroAssembler* masm) {
   Generate_NotifyStubFailureHelper(masm, kSaveFPRegs);
 }
 
-
 static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
                                              Deoptimizer::BailoutType type) {
   {
@@ -1660,22 +1628,18 @@ static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
   __ stop("no cases left");
 }
 
-
 void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
 }
 
-
 void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
 }
 
-
 void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
   Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
 }
 
-
 // Clobbers {t2, t3, t4, t5}.
 static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
                                     Register function_template_info,
@@ -1738,7 +1702,6 @@ static void CompatibleReceiverCheck(MacroAssembler* masm, Register receiver,
   __ bind(&receiver_check_passed);
 }
 
-
 void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : number of arguments excluding receiver
@@ -1775,7 +1738,6 @@ void Builtins::Generate_HandleFastApiCall(MacroAssembler* masm) {
   __ TailCallRuntime(Runtime::kThrowIllegalInvocation);
 }
 
-
 void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   // Lookup the function in the JavaScript frame.
   __ lw(a0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
@@ -1796,7 +1758,8 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   // Load the OSR entrypoint offset from the deoptimization data.
   // <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
   __ lw(a1, MemOperand(a1, FixedArray::OffsetOfElementAt(
-      DeoptimizationInputData::kOsrPcOffsetIndex) - kHeapObjectTag));
+                               DeoptimizationInputData::kOsrPcOffsetIndex) -
+                               kHeapObjectTag));
   __ SmiUntag(a1);
 
   // Compute the target address = code_obj + header_size + osr_offset
@@ -1808,7 +1771,6 @@ void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
   __ Ret();
 }
 
-
 // static
 void Builtins::Generate_DatePrototype_GetField(MacroAssembler* masm,
                                                int field_index) {
@@ -1937,7 +1899,6 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   }
 }
 
-
 // static
 void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   // 1. Make sure we have at least one argument.
@@ -1980,7 +1941,6 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
   __ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
 }
 
-
 void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : argc
@@ -2044,7 +2004,6 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   }
 }
 
-
 void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- a0 : argc
@@ -2123,7 +2082,6 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   }
 }
 
-
 static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
                                       Label* stack_overflow) {
   // ----------- S t a t e -------------
@@ -2145,16 +2103,14 @@ static void ArgumentAdaptorStackCheck(MacroAssembler* masm,
   __ Branch(stack_overflow, le, t1, Operand(at));
 }
 
-
 static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ sll(a0, a0, kSmiTagSize);
   __ li(t0, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
   __ MultiPush(a0.bit() | a1.bit() | t0.bit() | fp.bit() | ra.bit());
-  __ Addu(fp, sp,
-      Operand(StandardFrameConstants::kFixedFrameSizeFromFp + kPointerSize));
+  __ Addu(fp, sp, Operand(StandardFrameConstants::kFixedFrameSizeFromFp +
+                          kPointerSize));
 }
 
-
 static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   // -- v0 : result being passed through
@@ -2170,7 +2126,6 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
   __ Addu(sp, sp, Operand(kPointerSize));
 }
 
-
 // static
 void Builtins::Generate_Apply(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -2494,7 +2449,6 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
   }
 }
 
-
 // static
 void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
                                               TailCallMode tail_call_mode) {
@@ -2589,7 +2543,6 @@ void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm,
   __ Jump(at);
 }
 
-
 // static
 void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
                              TailCallMode tail_call_mode) {
@@ -2649,7 +2602,6 @@ void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode,
   }
 }
 
-
 // static
 void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -2671,7 +2623,6 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
   __ Jump(at);
 }
 
-
 // static
 void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -2763,7 +2714,6 @@ void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
   __ Jump(at);
 }
 
-
 // static
 void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -2782,7 +2732,6 @@ void Builtins::Generate_ConstructProxy(MacroAssembler* masm) {
       ExternalReference(Runtime::kJSProxyConstruct, masm->isolate()));
 }
 
-
 // static
 void Builtins::Generate_Construct(MacroAssembler* masm) {
   // ----------- S t a t e -------------
@@ -2948,8 +2897,8 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   Label invoke, dont_adapt_arguments, stack_overflow;
 
   Label enough, too_few;
-  __ Branch(&dont_adapt_arguments, eq,
-      a2, Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
+  __ Branch(&dont_adapt_arguments, eq, a2,
+            Operand(SharedFunctionInfo::kDontAdaptArgumentsSentinel));
   // We use Uless as the number of argument should always be greater than 0.
   __ Branch(&too_few, Uless, a0, Operand(a2));
 
@@ -3051,7 +3000,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   LeaveArgumentsAdaptorFrame(masm);
   __ Ret();
 
-
   // -------------------------------------------
   // Don't adapt arguments.
   // -------------------------------------------
@@ -3067,7 +3015,6 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
   }
 }
 
-
 #undef __
 
 }  // namespace internal