Chromium Code Reviews

Unified Diff: src/arm/code-stubs-arm.cc

Issue 246643014: CodeStubs contain their corresponding Isolate* now. (part 1) (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Feedback. Rebased. Created 6 years, 8 months ago
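The pattern applied throughout this file: each stub now receives its Isolate* at construction time (stored by the CodeStub base), so stub generators call the stub's own isolate() accessor instead of reaching through masm->isolate(), and call sites pass the isolate as the first constructor argument. A minimal, self-contained sketch of that idea, using hypothetical stand-in types rather than the real V8 classes:

// Stand-in declarations for illustration only; not the real V8 API.
class Isolate {};

class CodeStub {
 public:
  explicit CodeStub(Isolate* isolate) : isolate_(isolate) {}
  Isolate* isolate() const { return isolate_; }  // replaces masm->isolate() in generators
 private:
  Isolate* isolate_;
};

class ExampleStub : public CodeStub {
 public:
  explicit ExampleStub(Isolate* isolate) : CodeStub(isolate) {}
  void Generate() {
    // Before this CL: Isolate* isolate = masm->isolate();
    // After this CL:  the stub already knows its isolate.
    Isolate* current = isolate();
    (void)current;
  }
};

int main() {
  Isolate isolate;
  ExampleStub stub(&isolate);  // isolate is now a constructor argument
  stub.Generate();
  return 0;
}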
Index: src/arm/code-stubs-arm.cc
diff --git a/src/arm/code-stubs-arm.cc b/src/arm/code-stubs-arm.cc
index be876b6eee89c27378fcd2f79826bd0ad364a75a..d1587c8886e838a5935ccbdd7def4df8b7ac3a58 100644
--- a/src/arm/code-stubs-arm.cc
+++ b/src/arm/code-stubs-arm.cc
@@ -504,10 +504,9 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
// Update the static counter each time a new code stub is generated.
- Isolate* isolate = masm->isolate();
- isolate->counters()->code_stubs()->Increment();
+ isolate()->counters()->code_stubs()->Increment();
- CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
+ CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate());
int param_count = descriptor->register_param_count_;
{
// Call the runtime system in a fresh internal frame.
@@ -533,11 +532,13 @@ void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
// stub so you don't have to set up the frame.
class ConvertToDoubleStub : public PlatformCodeStub {
public:
- ConvertToDoubleStub(Register result_reg_1,
+ ConvertToDoubleStub(Isolate* isolate,
+ Register result_reg_1,
Register result_reg_2,
Register source_reg,
Register scratch_reg)
- : result1_(result_reg_1),
+ : PlatformCodeStub(isolate),
+ result1_(result_reg_1),
result2_(result_reg_2),
source_(source_reg),
zeros_(scratch_reg) { }
@@ -726,8 +727,8 @@ void DoubleToIStub::Generate(MacroAssembler* masm) {
void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- WriteInt32ToHeapNumberStub stub1(r1, r0, r2);
- WriteInt32ToHeapNumberStub stub2(r2, r0, r3);
+ WriteInt32ToHeapNumberStub stub1(isolate, r1, r0, r2);
+ WriteInt32ToHeapNumberStub stub2(isolate, r2, r0, r3);
stub1.GetCode(isolate);
stub2.GetCode(isolate);
}
@@ -1124,7 +1125,6 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
__ bind(&both_loaded_as_doubles);
// The arguments have been converted to doubles and stored in d6 and d7, if
// VFP3 is supported, or in r0, r1, r2, and r3.
- Isolate* isolate = masm->isolate();
__ bind(&lhs_not_nan);
Label no_nan;
// ARMv7 VFP3 instructions to implement double precision comparison.
@@ -1187,7 +1187,8 @@ void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
__ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs, rhs, r2, r3, &slow);
- __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3);
+ __ IncrementCounter(isolate()->counters()->string_compare_native(), 1, r2,
+ r3);
if (cc == eq) {
StringCompareStub::GenerateFlatAsciiStringEquals(masm,
lhs,
@@ -1251,9 +1252,9 @@ void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
AllowExternalCallThatCantCauseGC scope(masm);
__ PrepareCallCFunction(argument_count, fp_argument_count, scratch);
- __ mov(r0, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
__ CallCFunction(
- ExternalReference::store_buffer_overflow_function(masm->isolate()),
+ ExternalReference::store_buffer_overflow_function(isolate()),
argument_count);
if (save_doubles_ == kSaveFPRegs) {
__ RestoreFPRegs(sp, scratch);
@@ -1373,7 +1374,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ PrepareCallCFunction(0, 2, scratch);
__ MovToFloatParameters(double_base, double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
}
__ pop(lr);
@@ -1424,7 +1425,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ vcvt_f64_s32(double_exponent, single_scratch);
// Returning or bailing out.
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
if (exponent_type_ == ON_STACK) {
// The arguments are still on the stack.
__ bind(&call_runtime);
@@ -1447,7 +1448,7 @@ void MathPowStub::Generate(MacroAssembler* masm) {
__ PrepareCallCFunction(0, 2, scratch);
__ MovToFloatParameters(double_base, double_exponent);
__ CallCFunction(
- ExternalReference::power_double_double_function(masm->isolate()),
+ ExternalReference::power_double_double_function(isolate()),
0, 2);
}
__ pop(lr);
@@ -1479,8 +1480,8 @@ void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
void CodeStub::GenerateFPStubs(Isolate* isolate) {
SaveFPRegsMode mode = kSaveFPRegs;
- CEntryStub save_doubles(1, mode);
- StoreBufferOverflowStub stub(mode);
+ CEntryStub save_doubles(isolate, 1, mode);
+ StoreBufferOverflowStub stub(isolate, mode);
// These stubs might already be in the snapshot, detect that and don't
// regenerate, which would lead to code stub initialization state being messed
// up.
@@ -1497,7 +1498,7 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) {
void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
- CEntryStub stub(1, kDontSaveFPRegs);
+ CEntryStub stub(isolate, 1, kDontSaveFPRegs);
stub.GetCode(isolate);
}
@@ -1531,8 +1532,6 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// Result returned in r0 or r0+r1 by default.
- Isolate* isolate = masm->isolate();
-
#if V8_HOST_ARCH_ARM
int frame_alignment = MacroAssembler::ActivationFrameAlignment();
int frame_alignment_mask = frame_alignment - 1;
@@ -1551,7 +1550,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
// Call C built-in.
// r0 = argc, r1 = argv
- __ mov(r2, Operand(ExternalReference::isolate_address(isolate)));
+ __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
// To let the GC traverse the return address of the exit frames, we need to
// know where the return address is. The CEntryStub is unmovable, so
@@ -1586,7 +1585,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
__ b(eq, &exception_returned);
ExternalReference pending_exception_address(
- Isolate::kPendingExceptionAddress, isolate);
+ Isolate::kPendingExceptionAddress, isolate());
// Check that there is no pending exception, otherwise we
// should have returned the exception sentinel.
@@ -1673,15 +1672,14 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// r2: receiver
// r3: argc
// r4: argv
- Isolate* isolate = masm->isolate();
int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
if (FLAG_enable_ool_constant_pool) {
- __ mov(r8, Operand(isolate->factory()->empty_constant_pool_array()));
+ __ mov(r8, Operand(isolate()->factory()->empty_constant_pool_array()));
}
__ mov(r7, Operand(Smi::FromInt(marker)));
__ mov(r6, Operand(Smi::FromInt(marker)));
__ mov(r5,
- Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
+ Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
__ ldr(r5, MemOperand(r5));
__ mov(ip, Operand(-1)); // Push a bad frame pointer to fail if it is used.
__ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() |
@@ -1693,7 +1691,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// If this is the outermost JS call, set js_entry_sp value.
Label non_outermost_js;
- ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
+ ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
__ mov(r5, Operand(ExternalReference(js_entry_sp)));
__ ldr(r6, MemOperand(r5));
__ cmp(r6, Operand::Zero());
@@ -1723,7 +1721,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// fp will be invalid because the PushTryHandler below sets it to 0 to
// signal the existence of the JSEntry frame.
__ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
}
__ str(r0, MemOperand(ip));
__ LoadRoot(r0, Heap::kExceptionRootIndex);
@@ -1740,9 +1738,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// saved values before returning a failure to C.
// Clear any pending exceptions.
- __ mov(r5, Operand(isolate->factory()->the_hole_value()));
+ __ mov(r5, Operand(isolate()->factory()->the_hole_value()));
__ mov(ip, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
__ str(r5, MemOperand(ip));
// Invoke the function by calling through JS entry trampoline builtin.
@@ -1757,10 +1755,10 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// r4: argv
if (is_construct) {
ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
- isolate);
+ isolate());
__ mov(ip, Operand(construct_entry));
} else {
- ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
+ ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
__ mov(ip, Operand(entry));
}
__ ldr(ip, MemOperand(ip)); // deref address
@@ -1786,7 +1784,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Restore the top frame descriptors from the stack.
__ pop(r3);
__ mov(ip,
- Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate)));
+ Operand(ExternalReference(Isolate::kCEntryFPAddress, isolate())));
__ str(r3, MemOperand(ip));
// Reset the stack to the callee saved registers.
@@ -1945,7 +1943,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
__ b(ne, &slow);
// Null is not instance of anything.
- __ cmp(scratch, Operand(masm->isolate()->factory()->null_value()));
+ __ cmp(scratch, Operand(isolate()->factory()->null_value()));
__ b(ne, &object_not_null);
__ mov(r0, Operand(Smi::FromInt(1)));
__ Ret(HasArgsInRegisters() ? 0 : 2);
@@ -1992,7 +1990,7 @@ void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
// -- r0 : key
// -- r1 : receiver
// -----------------------------------
- __ cmp(r0, Operand(masm->isolate()->factory()->prototype_string()));
+ __ cmp(r0, Operand(isolate()->factory()->prototype_string()));
__ b(ne, &miss);
receiver = r1;
} else {
@@ -2422,11 +2420,10 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
Register last_match_info_elements = no_reg; // will be r6;
// Ensure that a RegExp stack is allocated.
- Isolate* isolate = masm->isolate();
ExternalReference address_of_regexp_stack_memory_address =
- ExternalReference::address_of_regexp_stack_memory_address(isolate);
+ ExternalReference::address_of_regexp_stack_memory_address(isolate());
ExternalReference address_of_regexp_stack_memory_size =
- ExternalReference::address_of_regexp_stack_memory_size(isolate);
+ ExternalReference::address_of_regexp_stack_memory_size(isolate());
__ mov(r0, Operand(address_of_regexp_stack_memory_size));
__ ldr(r0, MemOperand(r0, 0));
__ cmp(r0, Operand::Zero());
@@ -2568,7 +2565,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
// All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2);
+ __ IncrementCounter(isolate()->counters()->regexp_entry_native(), 1, r0, r2);
// Isolates: note we add an additional parameter here (isolate pointer).
const int kRegExpExecuteArguments = 9;
@@ -2579,7 +2576,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Arguments are before that on the stack or in registers.
// Argument 9 (sp[20]): Pass current isolate address.
- __ mov(r0, Operand(ExternalReference::isolate_address(isolate)));
+ __ mov(r0, Operand(ExternalReference::isolate_address(isolate())));
__ str(r0, MemOperand(sp, 5 * kPointerSize));
// Argument 8 (sp[16]): Indicate that this is a direct call from JavaScript.
@@ -2601,7 +2598,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Argument 5 (sp[4]): static offsets vector buffer.
__ mov(r0,
- Operand(ExternalReference::address_of_static_offsets_vector(isolate)));
+ Operand(ExternalReference::address_of_static_offsets_vector(
+ isolate())));
__ str(r0, MemOperand(sp, 1 * kPointerSize));
// For arguments 4 and 3 get string length, calculate start of string data and
@@ -2632,7 +2630,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Locate the code entry and call it.
__ add(r6, r6, Operand(Code::kHeaderSize - kHeapObjectTag));
- DirectCEntryStub stub;
+ DirectCEntryStub stub(isolate());
stub.GenerateCall(masm, r6);
__ LeaveExitFrame(false, no_reg, true);
@@ -2659,9 +2657,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// stack overflow (on the backtrack stack) was detected in RegExp code but
// haven't created the exception yet. Handle that in the runtime system.
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
- __ mov(r1, Operand(isolate->factory()->the_hole_value()));
+ __ mov(r1, Operand(isolate()->factory()->the_hole_value()));
__ mov(r2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
- isolate)));
+ isolate())));
__ ldr(r0, MemOperand(r2, 0));
__ cmp(r0, r1);
__ b(eq, &runtime);
@@ -2681,7 +2679,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ bind(&failure);
// For failure and exception return null.
- __ mov(r0, Operand(masm->isolate()->factory()->null_value()));
+ __ mov(r0, Operand(isolate()->factory()->null_value()));
__ add(sp, sp, Operand(4 * kPointerSize));
__ Ret();
@@ -2743,7 +2741,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Get the static offsets vector filled by the native regexp code.
ExternalReference address_of_static_offsets_vector =
- ExternalReference::address_of_static_offsets_vector(isolate);
+ ExternalReference::address_of_static_offsets_vector(isolate());
__ mov(r2, Operand(address_of_static_offsets_vector));
// r1: number of capture registers
@@ -2888,7 +2886,7 @@ static void GenerateRecordCallTarget(MacroAssembler* masm) {
__ SmiTag(r0);
__ Push(r3, r2, r1, r0);
- CreateAllocationSiteStub create_stub;
+ CreateAllocationSiteStub create_stub(masm->isolate());
__ CallStub(&create_stub);
__ Pop(r3, r2, r1, r0);
@@ -2977,8 +2975,8 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// If there is a call target cache, mark it megamorphic in the
// non-function case. MegamorphicSentinel is an immortal immovable
// object (megamorphic symbol) so no write barrier is needed.
- ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(masm->isolate()),
- masm->isolate()->heap()->megamorphic_symbol());
+ ASSERT_EQ(*TypeFeedbackInfo::MegamorphicSentinel(isolate()),
+ isolate()->heap()->megamorphic_symbol());
__ add(r5, r2, Operand::PointerOffsetFromSmiKey(r3));
__ LoadRoot(ip, Heap::kMegamorphicSymbolRootIndex);
__ str(ip, FieldMemOperand(r5, FixedArray::kHeaderSize));
@@ -2992,7 +2990,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ GetBuiltinFunction(r1, Builtins::CALL_FUNCTION_PROXY);
{
Handle<Code> adaptor =
- masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
+ isolate()->builtins()->ArgumentsAdaptorTrampoline();
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
@@ -3003,7 +3001,7 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
__ mov(r0, Operand(argc_)); // Set up the number of arguments.
__ mov(r2, Operand::Zero());
__ GetBuiltinFunction(r1, Builtins::CALL_NON_FUNCTION);
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
@@ -3080,7 +3078,7 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ bind(&do_call);
// Set expected number of arguments to zero (not changing r0).
__ mov(r2, Operand::Zero());
- __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
+ __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
RelocInfo::CODE_TARGET);
}
@@ -3608,7 +3606,7 @@ void SubStringStub::Generate(MacroAssembler* masm) {
masm, r1, r5, r2, r3, r4, r6, r9, DEST_ALWAYS_ALIGNED);
__ bind(&return_r0);
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
__ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
__ Drop(3);
__ Ret();
@@ -3743,7 +3741,7 @@ void StringCompareStub::GenerateAsciiCharsCompareLoop(
void StringCompareStub::Generate(MacroAssembler* masm) {
Label runtime;
- Counters* counters = masm->isolate()->counters();
+ Counters* counters = isolate()->counters();
// Stack frame on entry.
// sp[0]: right string
@@ -3783,12 +3781,11 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
// -- r0 : right
// -- lr : return address
// -----------------------------------
- Isolate* isolate = masm->isolate();
// Load r2 with the allocation site. We stick an undefined dummy value here
// and replace it with the real allocation site later when we instantiate this
// stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
- __ Move(r2, handle(isolate->heap()->undefined_value()));
+ __ Move(r2, handle(isolate()->heap()->undefined_value()));
// Make sure that we actually patched the allocation site.
if (FLAG_debug_code) {
@@ -3804,7 +3801,7 @@ void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
// Tail call into the stub that handles binary operations with allocation
// sites.
- BinaryOpWithAllocationSiteStub stub(state_);
+ BinaryOpWithAllocationSiteStub stub(isolate(), state_);
__ TailCallStub(&stub);
}
@@ -3882,9 +3879,9 @@ void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
__ bind(&unordered);
__ bind(&generic_stub);
- ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
+ ICCompareStub stub(isolate(), op_, CompareIC::GENERIC, CompareIC::GENERIC,
CompareIC::GENERIC);
- __ Jump(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ __ Jump(stub.GetCode(isolate()), RelocInfo::CODE_TARGET);
__ bind(&maybe_undefined1);
if (Token::IsOrderedRelationalCompareOp(op_)) {
@@ -4107,7 +4104,7 @@ void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
{
// Call the runtime system in a fresh internal frame.
ExternalReference miss =
- ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
+ ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ Push(r1, r0);
@@ -4139,7 +4136,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
Register target) {
intptr_t code =
- reinterpret_cast<intptr_t>(GetCode(masm->isolate()).location());
+ reinterpret_cast<intptr_t>(GetCode(isolate()).location());
__ Move(ip, target);
__ mov(lr, Operand(code, RelocInfo::CODE_TARGET));
__ blx(lr); // Call the stub.
@@ -4215,7 +4212,7 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ stm(db_w, sp, spill_mask);
__ ldr(r0, FieldMemOperand(receiver, JSObject::kPropertiesOffset));
__ mov(r1, Operand(Handle<Name>(name)));
- NameDictionaryLookupStub stub(NEGATIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), NEGATIVE_LOOKUP);
__ CallStub(&stub);
__ cmp(r0, Operand::Zero());
__ ldm(ia_w, sp, spill_mask);
@@ -4291,7 +4288,7 @@ void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
__ Move(r0, elements);
__ Move(r1, name);
}
- NameDictionaryLookupStub stub(POSITIVE_LOOKUP);
+ NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
__ CallStub(&stub);
__ cmp(r0, Operand::Zero());
__ mov(scratch2, Operand(r2));
@@ -4395,10 +4392,10 @@ void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
Isolate* isolate) {
- StoreBufferOverflowStub stub1(kDontSaveFPRegs);
+ StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
stub1.GetCode(isolate);
// Hydrogen code stubs need stub2 at snapshot time.
- StoreBufferOverflowStub stub2(kSaveFPRegs);
+ StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
stub2.GetCode(isolate);
}
@@ -4504,12 +4501,11 @@ void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
__ Move(address, regs_.address());
__ Move(r0, regs_.object());
__ Move(r1, address);
- __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
AllowExternalCallThatCantCauseGC scope(masm);
__ CallCFunction(
- ExternalReference::incremental_marking_record_write_function(
- masm->isolate()),
+ ExternalReference::incremental_marking_record_write_function(isolate()),
argument_count);
regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}
@@ -4664,8 +4660,8 @@ void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
- CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
- __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
+ CEntryStub ces(isolate(), 1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
+ __ Call(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
int parameter_count_offset =
StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
__ ldr(r1, MemOperand(fp, parameter_count_offset));
@@ -4681,7 +4677,7 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
- ProfileEntryHookStub stub;
+ ProfileEntryHookStub stub(masm->isolate());
int code_size = masm->CallStubSize(&stub) + 2 * Assembler::kInstrSize;
PredictableCodeSizeScope predictable(masm, code_size);
__ push(lr);
@@ -4729,18 +4725,18 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
#if V8_HOST_ARCH_ARM
int32_t entry_hook =
- reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook());
+ reinterpret_cast<int32_t>(isolate()->function_entry_hook());
__ mov(ip, Operand(entry_hook));
#else
// Under the simulator we need to indirect the entry hook through a
// trampoline function at a known address.
// It additionally takes an isolate as a third parameter
- __ mov(r2, Operand(ExternalReference::isolate_address(masm->isolate())));
+ __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));
ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
__ mov(ip, Operand(ExternalReference(&dispatcher,
ExternalReference::BUILTIN_CALL,
- masm->isolate())));
+ isolate())));
#endif
__ Call(ip);
@@ -4758,7 +4754,7 @@ template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
AllocationSiteOverrideMode mode) {
if (mode == DISABLE_ALLOCATION_SITES) {
- T stub(GetInitialFastElementsKind(), mode);
+ T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
int last_index = GetSequenceIndexFromFastElementsKind(
@@ -4766,7 +4762,7 @@ static void CreateArrayDispatch(MacroAssembler* masm,
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(r3, Operand(kind));
- T stub(kind);
+ T stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq);
}
@@ -4808,12 +4804,14 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
ElementsKind initial = GetInitialFastElementsKind();
ElementsKind holey_initial = GetHoleyElementsKind(initial);
- ArraySingleArgumentConstructorStub stub_holey(holey_initial,
+ ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
+ holey_initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub_holey);
__ bind(&normal_sequence);
- ArraySingleArgumentConstructorStub stub(initial,
+ ArraySingleArgumentConstructorStub stub(masm->isolate(),
+ initial,
DISABLE_ALLOCATION_SITES);
__ TailCallStub(&stub);
} else if (mode == DONT_OVERRIDE) {
@@ -4841,7 +4839,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
for (int i = 0; i <= last_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
__ cmp(r3, Operand(kind));
- ArraySingleArgumentConstructorStub stub(kind);
+ ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
__ TailCallStub(&stub, eq);
}
@@ -4859,10 +4857,10 @@ static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
TERMINAL_FAST_ELEMENTS_KIND);
for (int i = 0; i <= to_index; ++i) {
ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
- T stub(kind);
+ T stub(isolate, kind);
stub.GetCode(isolate);
if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
- T stub1(kind, DISABLE_ALLOCATION_SITES);
+ T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
stub1.GetCode(isolate);
}
}
@@ -4884,11 +4882,11 @@ void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
for (int i = 0; i < 2; i++) {
// For internal arrays we only need a few things
- InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
+ InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
stubh1.GetCode(isolate);
- InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
+ InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
stubh2.GetCode(isolate);
- InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
+ InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
stubh3.GetCode(isolate);
}
}
@@ -4967,10 +4965,10 @@ void InternalArrayConstructorStub::GenerateCase(
MacroAssembler* masm, ElementsKind kind) {
__ cmp(r0, Operand(1));
- InternalArrayNoArgumentConstructorStub stub0(kind);
+ InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
__ TailCallStub(&stub0, lo);
- InternalArrayNArgumentsConstructorStub stubN(kind);
+ InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
__ TailCallStub(&stubN, hi);
if (IsFastPackedElementsKind(kind)) {
@@ -4980,11 +4978,11 @@ void InternalArrayConstructorStub::GenerateCase(
__ cmp(r3, Operand::Zero());
InternalArraySingleArgumentConstructorStub
- stub1_holey(GetHoleyElementsKind(kind));
+ stub1_holey(isolate(), GetHoleyElementsKind(kind));
__ TailCallStub(&stub1_holey, ne);
}
- InternalArraySingleArgumentConstructorStub stub1(kind);
+ InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
__ TailCallStub(&stub1);
}
@@ -5073,8 +5071,6 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
STATIC_ASSERT(FCA::kHolderIndex == 0);
STATIC_ASSERT(FCA::kArgsLength == 7);
- Isolate* isolate = masm->isolate();
-
// context save
__ push(context);
// load context from callee
@@ -5096,7 +5092,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
__ push(scratch);
// isolate
__ mov(scratch,
- Operand(ExternalReference::isolate_address(isolate)));
+ Operand(ExternalReference::isolate_address(isolate())));
__ push(scratch);
// holder
__ push(holder);
@@ -5132,7 +5128,7 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
ExternalReference::Type thunk_type = ExternalReference::PROFILING_API_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
AllowExternalCallThatCantCauseGC scope(masm);
MemOperand context_restore_operand(
@@ -5183,7 +5179,7 @@ void CallApiGetterStub::Generate(MacroAssembler* masm) {
ExternalReference::PROFILING_GETTER_CALL;
ApiFunction thunk_fun(thunk_address);
ExternalReference thunk_ref = ExternalReference(&thunk_fun, thunk_type,
- masm->isolate());
+ isolate());
__ CallApiFunctionAndReturn(api_function_address,
thunk_ref,
kStackUnwindSpace,