| Index: src/mips/code-stubs-mips.cc
|
| diff --git a/src/mips/code-stubs-mips.cc b/src/mips/code-stubs-mips.cc
|
| index 83200827d10a8b07c070af1f7d0764784f879b8c..fe251b9e6fa9d27cccad365a11645f6dffff6550 100644
|
| --- a/src/mips/code-stubs-mips.cc
|
| +++ b/src/mips/code-stubs-mips.cc
|
| @@ -190,6 +190,71 @@ void FastNewContextStub::Generate(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| +void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
|
| + // Stack layout on entry:
|
| + //
|
| + // [sp]: function.
|
| + // [sp + kPointerSize]: serialized scope info.
|
| +
|
| + // Try to allocate the context in new space.
|
| + Label gc;
|
| + int length = slots_ + Context::MIN_CONTEXT_SLOTS;
|
| + __ AllocateInNewSpace(FixedArray::SizeFor(length),
|
| + v0, a1, a2, &gc, TAG_OBJECT);
|
| +
|
| + // Load the function from the stack.
|
| + __ lw(a3, MemOperand(sp, 0));
|
| +
|
| + // Load the serialized scope info from the stack.
|
| + __ lw(a1, MemOperand(sp, 1 * kPointerSize));
|
| +
|
| + // Set up the object header.
|
| + __ LoadRoot(a2, Heap::kBlockContextMapRootIndex);
|
| + __ sw(a2, FieldMemOperand(v0, HeapObject::kMapOffset));
|
| + __ li(a2, Operand(Smi::FromInt(length)));
|
| + __ sw(a2, FieldMemOperand(v0, FixedArray::kLengthOffset));
|
| +
|
| + // If this block context is nested in the global context we get a smi
|
| + // sentinel instead of a function. The block context should get the
|
| + // canonical empty function of the global context as its closure which
|
| + // we still have to look up.
|
| + Label after_sentinel;
|
| + __ JumpIfNotSmi(a3, &after_sentinel);
|
| + if (FLAG_debug_code) {
|
| + const char* message = "Expected 0 as a Smi sentinel";
|
| + __ Assert(eq, message, a3, Operand(zero_reg));
|
| + }
|
| + __ lw(a3, GlobalObjectOperand());
|
| + __ lw(a3, FieldMemOperand(a3, GlobalObject::kGlobalContextOffset));
|
| + __ lw(a3, ContextOperand(a3, Context::CLOSURE_INDEX));
|
| + __ bind(&after_sentinel);
|
| +
|
| + // Set up the fixed slots.
|
| + __ sw(a3, ContextOperand(v0, Context::CLOSURE_INDEX));
|
| + __ sw(cp, ContextOperand(v0, Context::PREVIOUS_INDEX));
|
| + __ sw(a1, ContextOperand(v0, Context::EXTENSION_INDEX));
|
| +
|
| + // Copy the global object from the previous context.
|
| + __ lw(a1, ContextOperand(cp, Context::GLOBAL_INDEX));
|
| + __ sw(a1, ContextOperand(v0, Context::GLOBAL_INDEX));
|
| +
|
| + // Initialize the rest of the slots to the hole value.
|
| + __ LoadRoot(a1, Heap::kTheHoleValueRootIndex);
|
| + for (int i = 0; i < slots_; i++) {
|
| + __ sw(a1, ContextOperand(v0, i + Context::MIN_CONTEXT_SLOTS));
|
| + }
|
| +
|
| + // Remove the two on-stack arguments and return.
|
| + __ mov(cp, v0);
|
| + __ Addu(sp, sp, Operand(2 * kPointerSize));
|
| + __ Ret();
|
| +
|
| + // Need to collect. Call into runtime system.
|
| + __ bind(&gc);
|
| + __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
|
| +}
|
| +
|
| +
|
| void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
|
| // Stack layout on entry:
|
| // [sp]: constant elements.
|
| @@ -886,7 +951,7 @@ void FloatingPointHelper::CallCCodeForDoubleOperation(
|
| }
|
|
|
|
|
| -bool WriteInt32ToHeapNumberStub::CompilingCallsToThisStubIsGCSafe() {
|
| +bool WriteInt32ToHeapNumberStub::IsPregenerated() {
|
| // These variants are compiled ahead of time. See next method.
|
| if (the_int_.is(a1) &&
|
| the_heap_number_.is(v0) &&
|
| @@ -910,8 +975,8 @@ bool WriteInt32ToHeapNumberStub::CompilingCallsToThisStubIsGCSafe() {
|
| void WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime() {
|
| WriteInt32ToHeapNumberStub stub1(a1, v0, a2, a3);
|
| WriteInt32ToHeapNumberStub stub2(a2, v0, a3, a0);
|
| - Handle<Code> code1 = stub1.GetCode();
|
| - Handle<Code> code2 = stub2.GetCode();
|
| + stub1.GetCode()->set_is_pregenerated(true);
|
| + stub2.GetCode()->set_is_pregenerated(true);
|
| }
|
|
|
|
|
| @@ -1282,6 +1347,8 @@ static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
|
| __ Move(f12, a0, a1);
|
| __ Move(f14, a2, a3);
|
| }
|
| +
|
| + AllowExternalCallThatCantCauseGC scope(masm);
|
| __ CallCFunction(ExternalReference::compare_doubles(masm->isolate()),
|
| 0, 2);
|
| __ pop(ra); // Because this function returns int, result is in v0.
|
| @@ -1313,7 +1380,7 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
|
| // If either operand is a JS object or an oddball value, then they are
|
| // not equal since their pointers are different.
|
| // There is no test for undetectability in strict equality.
|
| - STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
|
| + STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
|
| Label first_non_object;
|
| // Get the type of the first operand into a2 and compare it with
|
| // FIRST_SPEC_OBJECT_TYPE.
|
| @@ -2179,6 +2246,9 @@ void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(
|
|
|
|
|
| void BinaryOpStub::Generate(MacroAssembler* masm) {
|
| + // Explicitly allow generation of nested stubs. It is safe here because
|
| + // generation code does not use any raw pointers.
|
| + AllowStubCallsScope allow_stub_calls(masm, true);
|
| switch (operands_type_) {
|
| case BinaryOpIC::UNINITIALIZED:
|
| GenerateTypeTransition(masm);
|
| @@ -2983,9 +3053,9 @@ void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
|
| __ Ret();
|
| } else {
|
| // Tail call that writes the int32 in a2 to the heap number in v0, using
|
| - // a3 and a1 as scratch. v0 is preserved and returned.
|
| + // a3 and a0 as scratch. v0 is preserved and returned.
|
| __ mov(a0, t1);
|
| - WriteInt32ToHeapNumberStub stub(a2, v0, a3, a1);
|
| + WriteInt32ToHeapNumberStub stub(a2, v0, a3, a0);
|
| __ TailCallStub(&stub);
|
| }
|
|
|
| @@ -3542,17 +3612,30 @@ bool CEntryStub::IsPregenerated() {
|
|
|
|
|
| void CodeStub::GenerateStubsAheadOfTime() {
|
| + CEntryStub::GenerateAheadOfTime();
|
| WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime();
|
| + StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
|
| + RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
|
| }
|
|
|
|
|
| void CodeStub::GenerateFPStubs() {
|
| CEntryStub save_doubles(1, kSaveFPRegs);
|
| Handle<Code> code = save_doubles.GetCode();
|
| + code->set_is_pregenerated(true);
|
| + StoreBufferOverflowStub stub(kSaveFPRegs);
|
| + stub.GetCode()->set_is_pregenerated(true);
|
| code->GetIsolate()->set_fp_stubs_generated(true);
|
| }
|
|
|
|
|
| +void CEntryStub::GenerateAheadOfTime() {
|
| + CEntryStub stub(1, kDontSaveFPRegs);
|
| + Handle<Code> code = stub.GetCode();
|
| + code->set_is_pregenerated(true);
|
| +}
|
| +
|
| +
|
| void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) {
|
| __ Throw(v0);
|
| }
|
| @@ -3575,17 +3658,17 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
|
| // s1: pointer to the first argument (C callee-saved)
|
| // s2: pointer to builtin function (C callee-saved)
|
|
|
| + Isolate* isolate = masm->isolate();
|
| +
|
| if (do_gc) {
|
| // Move result passed in v0 into a0 to call PerformGC.
|
| __ mov(a0, v0);
|
| __ PrepareCallCFunction(1, 0, a1);
|
| - __ CallCFunction(
|
| - ExternalReference::perform_gc_function(masm->isolate()),
|
| - 1, 0);
|
| + __ CallCFunction(ExternalReference::perform_gc_function(isolate), 1, 0);
|
| }
|
|
|
| ExternalReference scope_depth =
|
| - ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
|
| + ExternalReference::heap_always_allocate_scope_depth(isolate);
|
| if (always_allocate) {
|
| __ li(a0, Operand(scope_depth));
|
| __ lw(a1, MemOperand(a0));
|
| @@ -3674,18 +3757,16 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
|
| v0, Operand(reinterpret_cast<int32_t>(out_of_memory)));
|
|
|
| // Retrieve the pending exception and clear the variable.
|
| - __ li(t0,
|
| - Operand(ExternalReference::the_hole_value_location(masm->isolate())));
|
| - __ lw(a3, MemOperand(t0));
|
| + __ li(a3, Operand(isolate->factory()->the_hole_value()));
|
| __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
|
| - masm->isolate())));
|
| + isolate)));
|
| __ lw(v0, MemOperand(t0));
|
| __ sw(a3, MemOperand(t0));
|
|
|
| // Special handling of termination exceptions which are uncatchable
|
| // by javascript code.
|
| __ Branch(throw_termination_exception, eq,
|
| - v0, Operand(masm->isolate()->factory()->termination_exception()));
|
| + v0, Operand(isolate->factory()->termination_exception()));
|
|
|
| // Handle normal exception.
|
| __ jmp(throw_normal_exception);
|
| @@ -3768,6 +3849,7 @@ void CEntryStub::Generate(MacroAssembler* masm) {
|
|
|
| void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
| Label invoke, exit;
|
| + Isolate* isolate = masm->isolate();
|
|
|
| // Registers:
|
| // a0: entry address
|
| @@ -3805,7 +3887,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
| __ li(t2, Operand(Smi::FromInt(marker)));
|
| __ li(t1, Operand(Smi::FromInt(marker)));
|
| __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
|
| - masm->isolate())));
|
| + isolate)));
|
| __ lw(t0, MemOperand(t0));
|
| __ Push(t3, t2, t1, t0);
|
| // Setup frame pointer for the frame to be pushed.
|
| @@ -3829,8 +3911,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
|
|
| // If this is the outermost JS call, set js_entry_sp value.
|
| Label non_outermost_js;
|
| - ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress,
|
| - masm->isolate());
|
| + ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
|
| __ li(t1, Operand(ExternalReference(js_entry_sp)));
|
| __ lw(t2, MemOperand(t1));
|
| __ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
|
| @@ -3853,7 +3934,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
| // Coming in here the fp will be invalid because the PushTryHandler below
|
| // sets it to 0 to signal the existence of the JSEntry frame.
|
| __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
|
| - masm->isolate())));
|
| + isolate)));
|
| __ sw(v0, MemOperand(t0)); // We come back from 'invoke'. result is in v0.
|
| __ li(v0, Operand(reinterpret_cast<int32_t>(Failure::Exception())));
|
| __ b(&exit); // b exposes branch delay slot.
|
| @@ -3868,11 +3949,9 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
| // saved values before returning a failure to C.
|
|
|
| // Clear any pending exceptions.
|
| - __ li(t0,
|
| - Operand(ExternalReference::the_hole_value_location(masm->isolate())));
|
| - __ lw(t1, MemOperand(t0));
|
| + __ li(t1, Operand(isolate->factory()->the_hole_value()));
|
| __ li(t0, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
|
| - masm->isolate())));
|
| + isolate)));
|
| __ sw(t1, MemOperand(t0));
|
|
|
| // Invoke the function by calling through JS entry trampoline builtin.
|
| @@ -3895,7 +3974,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
|
|
| if (is_construct) {
|
| ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
|
| - masm->isolate());
|
| + isolate);
|
| __ li(t0, Operand(construct_entry));
|
| } else {
|
| ExternalReference entry(Builtins::kJSEntryTrampoline, masm->isolate());
|
| @@ -3923,7 +4002,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
|
| // Restore the top frame descriptors from the stack.
|
| __ pop(t1);
|
| __ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
|
| - masm->isolate())));
|
| + isolate)));
|
| __ sw(t1, MemOperand(t0));
|
|
|
| // Reset the stack to the callee saved registers.
|
| @@ -4541,6 +4620,8 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
| static const int kSubjectOffset = 2 * kPointerSize;
|
| static const int kJSRegExpOffset = 3 * kPointerSize;
|
|
|
| + Isolate* isolate = masm->isolate();
|
| +
|
| Label runtime, invoke_regexp;
|
|
|
| // Allocation of registers for this function. These are in callee save
|
| @@ -4556,9 +4637,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
| // Ensure that a RegExp stack is allocated.
|
| ExternalReference address_of_regexp_stack_memory_address =
|
| ExternalReference::address_of_regexp_stack_memory_address(
|
| - masm->isolate());
|
| + isolate);
|
| ExternalReference address_of_regexp_stack_memory_size =
|
| - ExternalReference::address_of_regexp_stack_memory_size(masm->isolate());
|
| + ExternalReference::address_of_regexp_stack_memory_size(isolate);
|
| __ li(a0, Operand(address_of_regexp_stack_memory_size));
|
| __ lw(a0, MemOperand(a0, 0));
|
| __ Branch(&runtime, eq, a0, Operand(zero_reg));
|
| @@ -4639,7 +4720,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
| FieldMemOperand(a0, JSArray::kElementsOffset));
|
| __ lw(a0, FieldMemOperand(last_match_info_elements, HeapObject::kMapOffset));
|
| __ Branch(&runtime, ne, a0, Operand(
|
| - masm->isolate()->factory()->fixed_array_map()));
|
| + isolate->factory()->fixed_array_map()));
|
| // Check that the last match info has space for the capture registers and the
|
| // additional information.
|
| __ lw(a0,
|
| @@ -4730,7 +4811,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
| // subject: Subject string
|
| // regexp_data: RegExp data (FixedArray)
|
| // All checks done. Now push arguments for native regexp code.
|
| - __ IncrementCounter(masm->isolate()->counters()->regexp_entry_native(),
|
| + __ IncrementCounter(isolate->counters()->regexp_entry_native(),
|
| 1, a0, a2);
|
|
|
| // Isolates: note we add an additional parameter here (isolate pointer).
|
| @@ -4770,7 +4851,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
|
|
| // Argument 5: static offsets vector buffer.
|
| __ li(a0, Operand(
|
| - ExternalReference::address_of_static_offsets_vector(masm->isolate())));
|
| + ExternalReference::address_of_static_offsets_vector(isolate)));
|
| __ sw(a0, MemOperand(sp, 1 * kPointerSize));
|
|
|
| // For arguments 4 and 3 get string length, calculate start of string data
|
| @@ -4828,11 +4909,9 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
| // stack overflow (on the backtrack stack) was detected in RegExp code but
|
| // haven't created the exception yet. Handle that in the runtime system.
|
| // TODO(592): Rerunning the RegExp to get the stack overflow exception.
|
| - __ li(a1, Operand(
|
| - ExternalReference::the_hole_value_location(masm->isolate())));
|
| - __ lw(a1, MemOperand(a1, 0));
|
| + __ li(a1, Operand(isolate->factory()->the_hole_value()));
|
| __ li(a2, Operand(ExternalReference(Isolate::kPendingExceptionAddress,
|
| - masm->isolate())));
|
| + isolate)));
|
| __ lw(v0, MemOperand(a2, 0));
|
| __ Branch(&runtime, eq, v0, Operand(a1));
|
|
|
| @@ -4850,7 +4929,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
|
|
| __ bind(&failure);
|
| // For failure and exception return null.
|
| - __ li(v0, Operand(masm->isolate()->factory()->null_value()));
|
| + __ li(v0, Operand(isolate->factory()->null_value()));
|
| __ Addu(sp, sp, Operand(4 * kPointerSize));
|
| __ Ret();
|
|
|
| @@ -4892,7 +4971,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
|
|
|
| // Get the static offsets vector filled by the native regexp code.
|
| ExternalReference address_of_static_offsets_vector =
|
| - ExternalReference::address_of_static_offsets_vector(masm->isolate());
|
| + ExternalReference::address_of_static_offsets_vector(isolate);
|
| __ li(a2, Operand(address_of_static_offsets_vector));
|
|
|
| // a1: number of capture registers
|
| @@ -5017,6 +5096,22 @@ void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
|
| }
|
|
|
|
|
| +void CallFunctionStub::FinishCode(Code* code) {
|
| + code->set_has_function_cache(false);
|
| +}
|
| +
|
| +
|
| +void CallFunctionStub::Clear(Heap* heap, Address address) {
|
| + UNREACHABLE();
|
| +}
|
| +
|
| +
|
| +Object* CallFunctionStub::GetCachedValue(Address address) {
|
| + UNREACHABLE();
|
| + return NULL;
|
| +}
|
| +
|
| +
|
| void CallFunctionStub::Generate(MacroAssembler* masm) {
|
| Label slow, non_function;
|
|
|
| @@ -7095,13 +7190,29 @@ struct AheadOfTimeWriteBarrierStubList {
|
|
|
|
|
| struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
|
| - // TODO(1696): Fill this in for MIPS.
|
| + // Used in RegExpExecStub.
|
| + { s2, s0, t3, EMIT_REMEMBERED_SET },
|
| + { s2, a2, t3, EMIT_REMEMBERED_SET },
|
| + // Used in CompileArrayPushCall.
|
| + // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
|
| + // Also used in KeyedStoreIC::GenerateGeneric.
|
| + { a3, t0, t1, EMIT_REMEMBERED_SET },
|
| + // Used in CompileStoreGlobal.
|
| + { t0, a1, a2, OMIT_REMEMBERED_SET },
|
| + // Used in StoreStubCompiler::CompileStoreField via GenerateStoreField.
|
| + { a1, a2, a3, EMIT_REMEMBERED_SET },
|
| + { a3, a2, a1, EMIT_REMEMBERED_SET },
|
| + // Used in KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
|
| + { a2, a1, a3, EMIT_REMEMBERED_SET },
|
| + { a3, a1, a2, EMIT_REMEMBERED_SET },
|
| + // KeyedStoreStubCompiler::GenerateStoreFastElement.
|
| + { t0, a2, a3, EMIT_REMEMBERED_SET },
|
| // Null termination.
|
| { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET}
|
| };
|
|
|
|
|
| -bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() {
|
| +bool RecordWriteStub::IsPregenerated() {
|
| for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
|
| !entry->object.is(no_reg);
|
| entry++) {
|
| @@ -7113,15 +7224,18 @@ bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() {
|
| return true;
|
| }
|
| }
|
| - return true; // TODO(1696): Should be false.
|
| + return false;
|
| +}
|
| +
|
| +
|
| +bool StoreBufferOverflowStub::IsPregenerated() {
|
| + return save_doubles_ == kDontSaveFPRegs || ISOLATE->fp_stubs_generated();
|
| }
|
|
|
|
|
| void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
|
| StoreBufferOverflowStub stub1(kDontSaveFPRegs);
|
| - stub1.GetCode();
|
| - StoreBufferOverflowStub stub2(kSaveFPRegs);
|
| - stub2.GetCode();
|
| + stub1.GetCode()->set_is_pregenerated(true);
|
| }
|
|
|
|
|
| @@ -7134,7 +7248,7 @@ void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() {
|
| entry->address,
|
| entry->action,
|
| kDontSaveFPRegs);
|
| - stub.GetCode();
|
| + stub.GetCode()->set_is_pregenerated(true);
|
| }
|
| }
|
|
|
| @@ -7159,8 +7273,11 @@ void RecordWriteStub::Generate(MacroAssembler* masm) {
|
| __ nop();
|
|
|
| if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
|
| - __ RememberedSetHelper(
|
| - address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
|
| + __ RememberedSetHelper(object_,
|
| + address_,
|
| + value_,
|
| + save_fp_regs_mode_,
|
| + MacroAssembler::kReturnAtEnd);
|
| }
|
| __ Ret();
|
|
|
| @@ -7185,7 +7302,7 @@ void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
|
| Label dont_need_remembered_set;
|
|
|
| __ lw(regs_.scratch0(), MemOperand(regs_.address(), 0));
|
| - __ JumpIfNotInNewSpace(regs_.scratch0(),
|
| + __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
|
| regs_.scratch0(),
|
| &dont_need_remembered_set);
|
|
|
| @@ -7201,8 +7318,11 @@ void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
|
| masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
|
| InformIncrementalMarker(masm, mode);
|
| regs_.Restore(masm);
|
| - __ RememberedSetHelper(
|
| - address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
|
| + __ RememberedSetHelper(object_,
|
| + address_,
|
| + value_,
|
| + save_fp_regs_mode_,
|
| + MacroAssembler::kReturnAtEnd);
|
|
|
| __ bind(&dont_need_remembered_set);
|
| }
|
| @@ -7264,8 +7384,11 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
|
|
|
| regs_.Restore(masm);
|
| if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
|
| - __ RememberedSetHelper(
|
| - address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
|
| + __ RememberedSetHelper(object_,
|
| + address_,
|
| + value_,
|
| + save_fp_regs_mode_,
|
| + MacroAssembler::kReturnAtEnd);
|
| } else {
|
| __ Ret();
|
| }
|
| @@ -7305,8 +7428,11 @@ void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
|
|
|
| regs_.Restore(masm);
|
| if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
|
| - __ RememberedSetHelper(
|
| - address_, value_, save_fp_regs_mode_, MacroAssembler::kReturnAtEnd);
|
| + __ RememberedSetHelper(object_,
|
| + address_,
|
| + value_,
|
| + save_fp_regs_mode_,
|
| + MacroAssembler::kReturnAtEnd);
|
| } else {
|
| __ Ret();
|
| }
|
|
|