Chromium Code Reviews

Unified Diff: src/mips/code-stubs-mips.cc

Issue 252383005: MIPS: CodeStubs contain their corresponding Isolate* now. (part 1) (Closed) Base URL: https://github.com/v8/v8.git@gbl
Patch Set: Created 6 years, 8 months ago
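
The whole file follows one mechanical pattern: each stub constructor gains a leading Isolate* argument that is forwarded to the base class, and the generated code reads the isolate through the stub's own isolate() accessor instead of masm->isolate(). What follows is a minimal, self-contained sketch of that pattern for orientation only; Isolate, Counters, CodeStubBase and DemoStub are simplified stand-ins, not the real V8 declarations.

// Simplified stand-in types; the real V8 classes are far richer.
#include <cassert>

struct Counters { int code_stubs = 0; };
struct Isolate { Counters counters; };

class CodeStubBase {
 public:
  explicit CodeStubBase(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }  // accessor the patch relies on

 private:
  Isolate* isolate_;
};

class DemoStub : public CodeStubBase {
 public:
  explicit DemoStub(Isolate* isolate) : CodeStubBase(isolate) {}

  void Generate() {
    // Before the patch this would have reached through the assembler,
    // e.g. masm->isolate()->counters()->code_stubs()->Increment();
    // now the stub carries its own isolate.
    isolate()->counters.code_stubs++;
  }
};

int main() {
  Isolate isolate;
  DemoStub stub(&isolate);  // the isolate is supplied at construction time
  stub.Generate();
  assert(isolate.counters.code_stubs == 1);
  return 0;
}
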
Index: src/mips/code-stubs-mips.cc
diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
index 1d94e70b659ff147955fd17afcd2a6530c01025d..fda01b0a9255452ad294b29a91f641682e963b7b 100644
--- a/src/mips/code-stubs-mips.cc
+++ b/src/mips/code-stubs-mips.cc
@@ -494,10 +494,9 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
// Update the static counter each time a new code stub is generated.
- Isolate* isolate = masm->isolate();
- isolate->counters()->code_stubs()->Increment();
+ isolate()->counters()->code_stubs()->Increment();
- CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+ CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
int param_count = descriptor->register_param_count_;
{
// Call the runtime system in a fresh internal frame.
@@ -526,11 +525,13 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
// stub so you don't have to set up the frame.
class ConvertToDoubleStub : public PlatformCodeStub {
public:
- ConvertToDoubleStub(Register result_reg_1,
+ ConvertToDoubleStub(Isolate* isolate,
+ Register result_reg_1,
Register result_reg_2,
Register source_reg,
Register scratch_reg)
- : result1_(result_reg_1),
+ : PlatformCodeStub(isolate),
+ result1_(result_reg_1),
result2_(result_reg_2),
source_(source_reg),
zeros_(scratch_reg) { }
@@ -761,8 +762,8 @@ void DoubleToIStub::Generate(MacroAssembler* masm) {
void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
- WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
+ WriteInt32ToHeapNumberStub stub1(isolate, a1, v0, a2, a3);
+ WriteInt32ToHeapNumberStub stub2(isolate, a2, v0, a3, a0);
stub1.GetCode(isolate);
stub2.GetCode(isolate);
}
@@ -1154,8 +1155,6 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
// f12, f14 are the double representations of the left hand side
// and the right hand side if we have FPU. Otherwise a2, a3 represent
// left hand side and a0, a1 represent right hand side.
-
- Isolate* isolate = masm->isolate();
Label nan;
__ li(t0, Operand(LESS));
__ li(t1, Operand(GREATER));
@@ -1230,7 +1229,8 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
__ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, a2, a3, &slow);
- __ IncrementCounter(isolate->counters()->string_compare_native(), 1, a2, a3);
+ __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, a2,
+ a3);
if (cc == eq) {
StringCompareStub::GenerateFlatAsciiStringEquals(masm,
lhs,
@@ -1318,9 +1318,9 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
AllowExternalCallThatCantCauseGC scope(masm);
__ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
- __ li(a0, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
__ CallCFunction(
- ExternalReference::store_buffer_overflow_function(masm->isolate()),
+ ExternalReference::store_buffer_overflow_function(isolate()),
argument_count);
if (save_doubles_ == kSaveFPRegs) {
__ MultiPopFPU(kCallerSavedFPU);
@@ -1449,7 +1449,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ PrepareCallCFunction(0, 2, scratch2);
__ MovToFloatParameters(double_base, double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
}
__ pop(ra);
@@ -1510,7 +1510,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ cvt_d_w(double_exponent, single_scratch);
// Returning or bailing out.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
if (exponent_type_ == ON_STACK) {
// The arguments are still on the stack.
__ bind(&call_runtime);
@@ -1533,7 +1533,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ PrepareCallCFunction(0, 2, scratch);
__ MovToFloatParameters(double_base, double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
}
__ pop(ra);
@@ -1567,28 +1567,28 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
void StoreRegistersStateStub::GenerateAheadOfTime(
Isolate* isolate) {
- StoreRegistersStateStub stub1(kDontSaveFPRegs);
+ StoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
// Hydrogen code stubs need stub2 at snapshot time.
- StoreRegistersStateStub stub2(kSaveFPRegs);
+ StoreRegistersStateStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
void RestoreRegistersStateStub::GenerateAheadOfTime(
Isolate* isolate) {
- RestoreRegistersStateStub stub1(kDontSaveFPRegs);
+ RestoreRegistersStateStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
// Hydrogen code stubs need stub2 at snapshot time.
- RestoreRegistersStateStub stub2(kSaveFPRegs);
+ RestoreRegistersStateStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
void CodeStub::GenerateFPStubs(Isolate* isolate) {
SaveFPRegsMode mode = kSaveFPRegs;
- CEntryStub save_doubles(1, mode);
- StoreBufferOverflowStub stub(mode);
+ CEntryStub save_doubles(isolate, 1, mode);
+ StoreBufferOverflowStub stub(isolate, mode);
// These stubs might already be in the snapshot, detect that and don't
// regenerate, which would lead to code stub initialization state being messed
// up.
@@ -1605,7 +1605,7 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) {
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
- CEntryStub stub(1, kDontSaveFPRegs);
+ CEntryStub stub(isolate, 1, kDontSaveFPRegs);
stub.GetCode(isolate);
}
@@ -1637,8 +1637,6 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// s1: pointer to first argument (C callee-saved)
// s2: pointer to builtin function (C callee-saved)
- Isolate* isolate = masm->isolate();
-
// Prepare arguments for C routine.
// a0 = argc
__ mov(a0, s0);
@@ -1649,7 +1647,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ AssertStackIsAligned();
- __ li(a2, Operand(ExternalReference::isolate_address(isolate)));
+ __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
// To let the GC traverse the return address of the exit frames, we need to
// know where the return address is. The CEntryStub is unmovable, so
@@ -1701,7 +1699,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ Branch(&exception_returned, eq, t0, Operand(v0));
ExternalReference pending_exception_address(
- Isolate::kPendingExceptionAddress, isolate);
+ Isolate::kPendingExceptionAddress, isolate());
// Check that there is no pending exception, otherwise we
// should have returned the exception sentinel.
@@ -1731,7 +1729,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ lw(v0, MemOperand(a2));
// Clear the pending exception.
- __ li(a3, Operand(isolate->factory()->the_hole_value()));
+ __ li(a3, Operand(isolate()->factory()->the_hole_value()));
__ sw(a3, MemOperand(a2));
// Special handling of termination exceptions which are uncatchable
@@ -2061,7 +2059,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ Branch(&object_not_null,
ne,
scratch,
- Operand(masm->isolate()->factory()->null_value()));
+ Operand(isolate()->factory()->null_value()));
__ li(v0, Operand(Smi::FromInt(1)));
__ DropAndRet(HasArgsInRegisters() ? 0 : 2);
@@ -2109,7 +2107,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
// -- a1 : receiver
// -----------------------------------
__ Branch(&miss, ne, a0,
- Operand(masm->isolate()->factory()->prototype_string()));
+ Operand(isolate()->factory()->prototype_string()));
receiver = a1;
} else {
ASSERT(kind() == Code::LOAD_IC);
@@ -2550,8 +2548,6 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
const int kSubjectOffset = 2 * kPointerSize;
const int kJSRegExpOffset = 3 * kPointerSize;
- Isolate* isolate = masm->isolate();
-
Label runtime;
// Allocation of registers for this function. These are in callee save
// registers and will be preserved by the call to the native RegExp code, as
@@ -2566,9 +2562,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Ensure that a RegExp stack is allocated.
ExternalReference address_of_regexp_stack_memory_address =
ExternalReference::address_of_regexp_stack_memory_address(
- isolate);
+ isolate());
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size(isolate);
+ ExternalReference::address_of_regexp_stack_memory_size(isolate());
__ li(a0, Operand(address_of_regexp_stack_memory_size));
__ lw(a0, MemOperand(a0, 0));
__ Branch(&runtime, eq, a0, Operand(zero_reg));
@@ -2715,7 +2711,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
// All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(isolate->counters()->regexp_entry_native(),
+ __ IncrementCounter(isolate()->counters()->regexp_entry_native(),
1, a0, a2);
// Isolates: note we add an additional parameter here (isolate pointer).
@@ -2739,7 +2735,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Argument 9: Pass current isolate address.
// CFunctionArgumentOperand handles MIPS stack argument slots.
- __ li(a0, Operand(ExternalReference::isolate_address(isolate)));
+ __ li(a0, Operand(ExternalReference::isolate_address(isolate())));
__ sw(a0, MemOperand(sp, 5 * kPointerSize));
// Argument 8: Indicate that this is a direct call from JavaScript.
@@ -2761,7 +2757,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Argument 5: static offsets vector buffer.
__ li(a0, Operand(
- ExternalReference::address_of_static_offsets_vector(isolate)));
+ ExternalReference::address_of_static_offsets_vector(isolate())));
__ sw(a0, MemOperand(sp, 1 * kPointerSize));
// For arguments 4 and 3 get string length, calculate start of string data
@@ -2794,7 +2790,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Locate the code entry and call it.
__ Addu(t9, t9, Operand(Code::kHeaderSize - kHeapObjectTag));
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(masm, t9);
__ LeaveExitFrame(false, no_reg, true);
@@ -2816,9 +2812,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// stack overflow (on the backtrack stack) was detected in RegExp code but
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
- __ li(a1, Operand(isolate->factory()->the_hole_value()));
+ __ li(a1, Operand(isolate()->factory()->the_hole_value()));
__ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
__ lw(v0, MemOperand(a2, 0));
__ Branch(&runtime, eq, v0, Operand(a1));
@@ -2836,7 +2832,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ bind(&failure);
// For failure and exception return null.
- __ li(v0, Operand(isolate->factory()->null_value()));
+ __ li(v0, Operand(isolate()->factory()->null_value()));
__ DropAndRet(4);
// Process the result from the native regexp code.
@@ -2897,7 +2893,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Get the static offsets vector filled by the native regexp code.
ExternalReference address_of_static_offsets_vector =
- ExternalReference::address_of_static_offsets_vector(isolate);
+ ExternalReference::address_of_static_offsets_vector(isolate());
__ li(a2, Operand(address_of_static_offsets_vector));
// a1: number of capture registers
@@ -3050,7 +3046,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ SmiTag(a0);
__ MultiPush(kSavedRegs);
- CreateAllocationSiteStub create_stub;
+ CreateAllocationSiteStub create_stub(masm->isolate());
__ CallStub(&create_stub);
__ MultiPop(kSavedRegs);
@@ -3138,8 +3134,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// If there is a call target cache, mark it megamorphic in the
// non-function case. MegamorphicSentinel is an immortal immovable
// object (megamorphic symbol) so no write barrier is needed.
- ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
- masm->isolate()->heap()->megamorphic_symbol());
+ ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
+ isolate()->heap()->megamorphic_symbol());
__ sll(t1, a3, kPointerSizeLog2 - kSmiTagSize);
__ Addu(t1, a2, Operand(t1));
__ LoadRoot(at, Heap::kMegamorphicSymbolRootIndex);
@@ -3153,7 +3149,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ GetBuiltinFunction(a1, Builtins::CALL_FUNCTION_PROXY);
{
Handle<Code> adaptor =
- masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+ isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
@@ -3164,7 +3160,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ li(a0, Operand(argc_)); // Set up the number of arguments.
__ li(a2, Operand(0, RelocInfo::NONE32));
__ GetBuiltinFunction(a1, Builtins::CALL_NON_FUNCTION);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
@@ -3242,7 +3238,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ bind(&do_call);
// Set expected number of arguments to zero (not changing r0).
__ li(a2, Operand(0, RelocInfo::NONE32));
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
@@ -3770,7 +3766,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED);
__ bind(&return_v0);
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, a3, t0);
__ DropAndRet(3);
@@ -3914,7 +3910,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
void StringCompareStub::Generate(MacroAssembler* masm) {
Label runtime;
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
// Stack frame on entry.
// sp[0]: right string
@@ -3951,12 +3947,11 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
// -- a0 : right
// -- ra : return address
// -----------------------------------
- Isolate* isolate = masm->isolate();
// Load a2 with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ li(a2, handle(isolate->heap()->undefined_value()));
+ __ li(a2, handle(isolate()->heap()->undefined_value()));
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
@@ -3969,7 +3964,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
// Tail call into the stub that handles binary operations with allocation
// sites.
- BinaryOpWithAllocationSiteStub stub(state_);
+ BinaryOpWithAllocationSiteStub stub(isolate(), state_);
__ TailCallStub(&stub);
}
@@ -4065,9 +4060,9 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
__ bind(&unordered);
__ bind(&generic_stub);
- ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+ ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
CompareIC::GENERIC);
- __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
__ bind(&maybe_undefined1);
if (Token::IsOrderedRelationalCompareOp(op_)) {
@@ -4301,7 +4296,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
{
// Call the runtime system in a fresh internal frame.
ExternalReference miss =
- ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(a1, a0);
__ Push(ra, a1, a0);
@@ -4346,7 +4341,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Register target) {
intptr_t loc =
- reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
+ reinterpret_cast<intptr_t>(GetCode(isolate()).location());
__ Move(t9, target);
__ li(ra, Operand(loc, RelocInfo::CODE_TARGET), CONSTANT_SIZE);
__ Call(ra);
@@ -4421,7 +4416,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ MultiPush(spill_mask);
__ lw(a0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
__ li(a1, Operand(Handle<Name>(name)));
- NameDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
__ CallStub(&stub);
__ mov(at, v0);
__ MultiPop(spill_mask);
@@ -4500,7 +4495,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
__ Move(a0, elements);
__ Move(a1, name);
}
- NameDictionaryLookupStub stub(POSITIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
__ CallStub(&stub);
__ mov(scratch2, a2);
__ mov(at, v0);
@@ -4608,10 +4603,10 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+ StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
// Hydrogen code stubs need stub2 at snapshot time.
- StoreBufferOverflowStub stub2(kSaveFPRegs);
+ StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
@@ -4714,12 +4709,11 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
__ Move(address, regs_.address());
__ Move(a0, regs_.object());
__ Move(a1, address);
- __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
- ExternalReference::incremental_marking_record_write_function(
- masm->isolate()),
+ ExternalReference::incremental_marking_record_write_function(isolate()),
argument_count);
regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
@@ -4879,8 +4873,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
- CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
- __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+ __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ lw(a1, MemOperand(fp, parameter_count_offset));
@@ -4896,7 +4890,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
- ProfileEntryHookStub stub;
+ ProfileEntryHookStub stub(masm->isolate());
__ push(ra);
__ CallStub(&stub);
__ pop(ra);
@@ -4938,18 +4932,18 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
__ Subu(sp, sp, kCArgsSlotsSize);
#if defined(V8_HOST_ARCH_MIPS)
int32_t entry_hook =
- reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook());
+ reinterpret_cast<int32_t>(isolate()->function_entry_hook());
__ li(t9, Operand(entry_hook));
#else
// Under the simulator we need to indirect the entry hook through a
// trampoline function at a known address.
// It additionally takes an isolate as a third parameter.
- __ li(a2, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ li(a2, Operand(ExternalReference::isolate_address(isolate())));
ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
__ li(t9, Operand(ExternalReference(&dispatcher,
ExternalReference::BUILTIN_CALL,
- masm->isolate())));
+ isolate())));
#endif
// Call C function through t9 to conform ABI for PIC.
__ Call(t9);
@@ -4971,14 +4965,14 @@ template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
- T stub(GetInitialFastElementsKind(), mode);
+ T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index = GetSequenceIndexFromFastElementsKind(
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- T stub(kind);
+ T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
@@ -5019,12 +5013,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
- ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+ ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+ holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
__ bind(&normal_sequence);
- ArraySingleArgumentConstructorStub stub(initial,
+ ArraySingleArgumentConstructorStub stub(masm->isolate(),
+ initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
@@ -5052,7 +5048,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- ArraySingleArgumentConstructorStub stub(kind);
+ ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq, a3, Operand(kind));
}
@@ -5070,10 +5066,10 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= to_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- T stub(kind);
+ T stub(isolate, kind);
stub.GetCode(isolate);
if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
- T stub1(kind, DISABLE_ALLOCATION_SITES);
+ T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
stub1.GetCode(isolate);
}
}
@@ -5095,11 +5091,11 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things.
- InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+ InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
stubh1.GetCode(isolate);
- InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+ InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode(isolate);
- InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+ InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
stubh3.GetCode(isolate);
}
}
@@ -5178,10 +5174,10 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
void InternalArrayConstructorStub::GenerateCase(
MacroAssembler* masm, ElementsKind kind) {
- InternalArrayNoArgumentConstructorStub stub0(kind);
+ InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
__ TailCallStub(&stub0, lo, a0, Operand(1));
- InternalArrayNArgumentsConstructorStub stubN(kind);
+ InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
__ TailCallStub(&stubN, hi, a0, Operand(1));
if (IsFastPackedElementsKind(kind)) {
@@ -5190,11 +5186,11 @@ void InternalArrayConstructorStub::GenerateCase(
__ lw(at, MemOperand(sp, 0));
InternalArraySingleArgumentConstructorStub
- stub1_holey(GetHoleyElementsKind(kind));
+ stub1_holey(isolate(), GetHoleyElementsKind(kind));
__ TailCallStub(&stub1_holey, ne, at, Operand(zero_reg));
}
- InternalArraySingleArgumentConstructorStub stub1(kind);
+ InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
__ TailCallStub(&stub1);
}
@@ -5284,8 +5280,6 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(FCA::kHolderIndex == 0);
STATIC_ASSERT(FCA::kArgsLength == 7);
- Isolate* isolate = masm->isolate();
-
// Save context, callee and call data.
__ Push(context, callee, call_data);
// Load context from callee.
@@ -5298,7 +5292,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
// Push return value and default return value.
__ Push(scratch, scratch);
__ li(scratch,
- Operand(ExternalReference::isolate_address(isolate)));
+ Operand(ExternalReference::isolate_address(isolate())));
// Push isolate and holder.
__ Push(scratch, holder);
@@ -5332,7 +5326,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
AllowExternalCallThatCantCauseGC scope(masm);
MemOperand context_restore_operand(
@@ -5383,7 +5377,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
ExternalReference::PROFILING_GETTER_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
__ CallApiFunctionAndReturn(api_function_address,
thunk_ref,
kStackUnwindSpace,
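
The call-site half of the same pattern runs through the ahead-of-time generators above: every stub is now constructed with the isolate it belongs to, while GetCode(isolate) keeps its old one-argument form in this part 1 change. Below is a stand-in sketch, again with placeholder types rather than the real V8 API.

// Placeholder types; only the shape of the calls mirrors the diff above.
enum SaveFPRegsMode { kDontSaveFPRegs, kSaveFPRegs };
struct Isolate {};

class DemoStoreBufferOverflowStub {
 public:
  // The isolate is now a constructor argument...
  DemoStoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode mode)
      : isolate_(isolate), save_fp_regs_mode_(mode) {}

  // ...while GetCode(isolate) is unchanged in this part of the series.
  void GetCode(Isolate* isolate) {
    (void)isolate;  // placeholder: the real method assembles the stub code
  }

  Isolate* isolate() const { return isolate_; }
  SaveFPRegsMode save_fp_regs_mode() const { return save_fp_regs_mode_; }

 private:
  Isolate* isolate_;
  SaveFPRegsMode save_fp_regs_mode_;
};

// Mirrors GenerateFixedRegStubsAheadOfTime in the diff: both stub instances
// receive the isolate before their code is generated.
void GenerateStubsAheadOfTime(Isolate* isolate) {
  DemoStoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode(isolate);
  DemoStoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode(isolate);
}

int main() {
  Isolate isolate;
  GenerateStubsAheadOfTime(&isolate);
  return 0;
}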