Index: src/x64/code-stubs-x64.cc |
=================================================================== |
--- src/x64/code-stubs-x64.cc (revision 9531) |
+++ src/x64/code-stubs-x64.cc (working copy) |
@@ -233,6 +233,8 @@ |
// The stub expects its argument on the stack and returns its result in tos_: |
// zero for false, and a non-zero value for true. |
void ToBooleanStub::Generate(MacroAssembler* masm) { |
+ // This stub overrides SometimesSetsUpAFrame() to return false. That means |
+ // we cannot call anything that could cause a GC from this stub. |
Label patch; |
const Register argument = rax; |
const Register map = rdx; |
@@ -328,6 +330,25 @@ |
} |
+void StoreBufferOverflowStub::Generate(MacroAssembler* masm) { |
+ __ PushCallerSaved(save_doubles_); |
+ const int argument_count = 1; |
+ __ PrepareCallCFunction(argument_count); |
+#ifdef _WIN64 |
+ __ LoadAddress(rcx, ExternalReference::isolate_address()); |
+#else |
+ __ LoadAddress(rdi, ExternalReference::isolate_address()); |
+#endif |
+ |
+ AllowExternalCallThatCantCauseGC scope(masm); |
+ __ CallCFunction( |
+ ExternalReference::store_buffer_overflow_function(masm->isolate()), |
+ argument_count); |
+ __ PopCallerSaved(save_doubles_); |
+ __ ret(0); |
+} |
+ |
+ |
void ToBooleanStub::CheckOddball(MacroAssembler* masm, |
Type type, |
Heap::RootListIndex value, |
@@ -622,12 +643,13 @@ |
__ jmp(&heapnumber_allocated); |
__ bind(&slow_allocate_heapnumber); |
- __ EnterInternalFrame(); |
- __ push(rax); |
- __ CallRuntime(Runtime::kNumberAlloc, 0); |
- __ movq(rcx, rax); |
- __ pop(rax); |
- __ LeaveInternalFrame(); |
+ { |
+ FrameScope scope(masm, StackFrame::INTERNAL); |
+ __ push(rax); |
+ __ CallRuntime(Runtime::kNumberAlloc, 0); |
+ __ movq(rcx, rax); |
+ __ pop(rax); |
+ } |
__ bind(&heapnumber_allocated); |
// rcx: allocated 'empty' number |
@@ -751,6 +773,10 @@ |
void BinaryOpStub::Generate(MacroAssembler* masm) { |
+ // Explicitly allow generation of nested stubs. It is safe here because |
+ // generation code does not use any raw pointers. |
+ AllowStubCallsScope allow_stub_calls(masm, true); |
+ |
switch (operands_type_) { |
case BinaryOpIC::UNINITIALIZED: |
GenerateTypeTransition(masm); |
@@ -1453,11 +1479,12 @@ |
__ addq(rsp, Immediate(kDoubleSize)); |
// We return the value in xmm1 without adding it to the cache, but |
// we cause a scavenging GC so that future allocations will succeed. |
- __ EnterInternalFrame(); |
- // Allocate an unused object bigger than a HeapNumber. |
- __ Push(Smi::FromInt(2 * kDoubleSize)); |
- __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
- __ LeaveInternalFrame(); |
+ { |
+ FrameScope scope(masm, StackFrame::INTERNAL); |
+ // Allocate an unused object bigger than a HeapNumber. |
+ __ Push(Smi::FromInt(2 * kDoubleSize)); |
+ __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace); |
+ } |
__ Ret(); |
} |
@@ -1473,10 +1500,11 @@ |
__ bind(&runtime_call); |
__ AllocateHeapNumber(rax, rdi, &skip_cache); |
__ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1); |
- __ EnterInternalFrame(); |
- __ push(rax); |
- __ CallRuntime(RuntimeFunction(), 1); |
- __ LeaveInternalFrame(); |
+ { |
+ FrameScope scope(masm, StackFrame::INTERNAL); |
+ __ push(rax); |
+ __ CallRuntime(RuntimeFunction(), 1); |
+ } |
__ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset)); |
__ Ret(); |
} |
@@ -2670,12 +2698,18 @@ |
// Store last subject and last input. |
__ movq(rax, Operand(rsp, kSubjectOffset)); |
__ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax); |
- __ movq(rcx, rbx); |
- __ RecordWrite(rcx, RegExpImpl::kLastSubjectOffset, rax, rdi); |
+ __ RecordWriteField(rbx, |
+ RegExpImpl::kLastSubjectOffset, |
+ rax, |
+ rdi, |
+ kDontSaveFPRegs); |
__ movq(rax, Operand(rsp, kSubjectOffset)); |
__ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax); |
- __ movq(rcx, rbx); |
- __ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi); |
+ __ RecordWriteField(rbx, |
+ RegExpImpl::kLastInputOffset, |
+ rax, |
+ rdi, |
+ kDontSaveFPRegs); |
// Get the static offsets vector filled by the native regexp code. |
__ LoadAddress(rcx, |
@@ -3231,6 +3265,22 @@ |
} |
+void CallFunctionStub::FinishCode(Code* code) { |
+ code->set_has_function_cache(false); |
+} |
+ |
+ |
+void CallFunctionStub::Clear(Heap* heap, Address address) { |
+ UNREACHABLE(); |
+} |
+ |
+ |
+Object* CallFunctionStub::GetCachedValue(Address address) { |
+ UNREACHABLE(); |
+ return NULL; |
+} |
+ |
+ |
void CallFunctionStub::Generate(MacroAssembler* masm) { |
Label slow, non_function; |
@@ -3319,6 +3369,35 @@ |
} |
+bool CEntryStub::IsPregenerated() { |
+#ifdef _WIN64 |
+ return result_size_ == 1; |
+#else |
+ return true; |
+#endif |
+} |
+ |
+ |
+void CodeStub::GenerateStubsAheadOfTime() { |
+ CEntryStub::GenerateAheadOfTime(); |
+ StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(); |
+ // It is important that the store buffer overflow stubs are generated first. |
+ RecordWriteStub::GenerateFixedRegStubsAheadOfTime(); |
+} |
+ |
+ |
+void CodeStub::GenerateFPStubs() { |
+} |
+ |
+ |
+void CEntryStub::GenerateAheadOfTime() { |
+ CEntryStub stub(1, kDontSaveFPRegs); |
+ stub.GetCode()->set_is_pregenerated(true); |
+ CEntryStub save_doubles(1, kSaveFPRegs); |
+ save_doubles.GetCode()->set_is_pregenerated(true); |
+} |
+ |
+ |
void CEntryStub::GenerateThrowTOS(MacroAssembler* masm) { |
// Throw exception in eax. |
__ Throw(rax); |
@@ -3757,6 +3836,7 @@ |
__ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex); |
__ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex); |
} else { |
+ // Get return address and delta to inlined map check. |
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
__ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax); |
@@ -3791,9 +3871,11 @@ |
__ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex); |
} else { |
// Store offset of true in the root array at the inline check site. |
- ASSERT((Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias |
- == 0xB0 - 0x100); |
- __ movl(rax, Immediate(0xB0)); // TrueValue is at -10 * kPointerSize. |
+ int true_offset = 0x100 + |
+ (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
+    // Assert the biased offset fits in one byte (the unsigned encoding of a signed disp8). |
+ ASSERT(true_offset >= 0 && true_offset < 0x100); |
+ __ movl(rax, Immediate(true_offset)); |
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
@@ -3812,9 +3894,11 @@ |
__ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex); |
} else { |
// Store offset of false in the root array at the inline check site. |
- ASSERT((Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias |
- == 0xB8 - 0x100); |
- __ movl(rax, Immediate(0xB8)); // FalseValue is at -9 * kPointerSize. |
+ int false_offset = 0x100 + |
+ (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias; |
+    // Assert the biased offset fits in one byte (the unsigned encoding of a signed disp8). |
+ ASSERT(false_offset >= 0 && false_offset < 0x100); |
+ __ movl(rax, Immediate(false_offset)); |
__ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize)); |
__ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize)); |
__ movb(Operand(kScratchRegister, kOffsetToResultValue), rax); |
@@ -5271,12 +5355,13 @@ |
// Call the runtime system in a fresh internal frame. |
ExternalReference miss = |
ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); |
- __ EnterInternalFrame(); |
- __ push(rdx); |
- __ push(rax); |
- __ Push(Smi::FromInt(op_)); |
- __ CallExternalReference(miss, 3); |
- __ LeaveInternalFrame(); |
+ { |
+ FrameScope scope(masm, StackFrame::INTERNAL); |
+ __ push(rdx); |
+ __ push(rax); |
+ __ Push(Smi::FromInt(op_)); |
+ __ CallExternalReference(miss, 3); |
+ } |
// Compute the entry point of the rewritten stub. |
__ lea(rdi, FieldOperand(rax, Code::kHeaderSize)); |
@@ -5407,6 +5492,8 @@ |
void StringDictionaryLookupStub::Generate(MacroAssembler* masm) { |
+ // This stub overrides SometimesSetsUpAFrame() to return false. That means |
+ // we cannot call anything that could cause a GC from this stub. |
// Stack frame on entry: |
// esp[0 * kPointerSize]: return address. |
// esp[1 * kPointerSize]: key's hash. |
@@ -5492,6 +5579,279 @@ |
} |
+struct AheadOfTimeWriteBarrierStubList { |
+ Register object, value, address; |
+ RememberedSetAction action; |
+}; |
+ |
+ |
+struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { |
+ // Used in RegExpExecStub. |
+ { rbx, rax, rdi, EMIT_REMEMBERED_SET }, |
+ // Used in CompileArrayPushCall. |
+ { rbx, rcx, rdx, EMIT_REMEMBERED_SET }, |
+ // Used in CompileStoreGlobal. |
+ { rbx, rcx, rdx, OMIT_REMEMBERED_SET }, |
+ // Used in StoreStubCompiler::CompileStoreField and |
+ // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField. |
+ { rdx, rcx, rbx, EMIT_REMEMBERED_SET }, |
+ // GenerateStoreField calls the stub with two different permutations of |
+ // registers. This is the second. |
+ { rbx, rcx, rdx, EMIT_REMEMBERED_SET }, |
+ // StoreIC::GenerateNormal via GenerateDictionaryStore. |
+ { rbx, r8, r9, EMIT_REMEMBERED_SET }, |
+ // KeyedStoreIC::GenerateGeneric. |
+ { rbx, rdx, rcx, EMIT_REMEMBERED_SET}, |
+ // KeyedStoreStubCompiler::GenerateStoreFastElement. |
+ { rdi, rdx, rcx, EMIT_REMEMBERED_SET}, |
+ // Null termination. |
+ { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET} |
+}; |
+ |
+ |
+bool RecordWriteStub::IsPregenerated() { |
+ for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; |
+ !entry->object.is(no_reg); |
+ entry++) { |
+ if (object_.is(entry->object) && |
+ value_.is(entry->value) && |
+ address_.is(entry->address) && |
+ remembered_set_action_ == entry->action && |
+ save_fp_regs_mode_ == kDontSaveFPRegs) { |
+ return true; |
+ } |
+ } |
+ return false; |
+} |
+ |
+ |
+void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() { |
+ StoreBufferOverflowStub stub1(kDontSaveFPRegs); |
+ stub1.GetCode()->set_is_pregenerated(true); |
+ StoreBufferOverflowStub stub2(kSaveFPRegs); |
+ stub2.GetCode()->set_is_pregenerated(true); |
+} |
+ |
+ |
+void RecordWriteStub::GenerateFixedRegStubsAheadOfTime() { |
+ for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime; |
+ !entry->object.is(no_reg); |
+ entry++) { |
+ RecordWriteStub stub(entry->object, |
+ entry->value, |
+ entry->address, |
+ entry->action, |
+ kDontSaveFPRegs); |
+ stub.GetCode()->set_is_pregenerated(true); |
+ } |
+} |
+ |
+ |
+// Takes the input in 3 registers: address_, value_ and object_.  A pointer to |
+// the value has just been written into the object, now this stub makes sure |
+// we keep the GC informed. The word in the object where the value has been |
+// written is in the address register. |
+void RecordWriteStub::Generate(MacroAssembler* masm) { |
+ Label skip_to_incremental_noncompacting; |
+ Label skip_to_incremental_compacting; |
+ |
+ // The first two instructions are generated with labels so as to get the |
+ // offset fixed up correctly by the bind(Label*) call. We patch it back and |
+  // forth between a compare instruction (a nop in this position) and the |
+ // real branch when we start and stop incremental heap marking. |
+ // See RecordWriteStub::Patch for details. |
+ __ jmp(&skip_to_incremental_noncompacting, Label::kNear); |
+ __ jmp(&skip_to_incremental_compacting, Label::kFar); |
+ |
+ if (remembered_set_action_ == EMIT_REMEMBERED_SET) { |
+ __ RememberedSetHelper(object_, |
+ address_, |
+ value_, |
+ save_fp_regs_mode_, |
+ MacroAssembler::kReturnAtEnd); |
+ } else { |
+ __ ret(0); |
+ } |
+ |
+ __ bind(&skip_to_incremental_noncompacting); |
+ GenerateIncremental(masm, INCREMENTAL); |
+ |
+ __ bind(&skip_to_incremental_compacting); |
+ GenerateIncremental(masm, INCREMENTAL_COMPACTION); |
+ |
+ // Initial mode of the stub is expected to be STORE_BUFFER_ONLY. |
+ // Will be checked in IncrementalMarking::ActivateGeneratedStub. |
+ masm->set_byte_at(0, kTwoByteNopInstruction); |
+ masm->set_byte_at(2, kFiveByteNopInstruction); |
+} |
+ |
+ |
+void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) { |
+ regs_.Save(masm); |
+ |
+ if (remembered_set_action_ == EMIT_REMEMBERED_SET) { |
+ Label dont_need_remembered_set; |
+ |
+ __ movq(regs_.scratch0(), Operand(regs_.address(), 0)); |
+ __ JumpIfNotInNewSpace(regs_.scratch0(), |
+ regs_.scratch0(), |
+ &dont_need_remembered_set); |
+ |
+ __ CheckPageFlag(regs_.object(), |
+ regs_.scratch0(), |
+ 1 << MemoryChunk::SCAN_ON_SCAVENGE, |
+ not_zero, |
+ &dont_need_remembered_set); |
+ |
+ // First notify the incremental marker if necessary, then update the |
+ // remembered set. |
+ CheckNeedsToInformIncrementalMarker( |
+ masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode); |
+ InformIncrementalMarker(masm, mode); |
+ regs_.Restore(masm); |
+ __ RememberedSetHelper(object_, |
+ address_, |
+ value_, |
+ save_fp_regs_mode_, |
+ MacroAssembler::kReturnAtEnd); |
+ |
+ __ bind(&dont_need_remembered_set); |
+ } |
+ |
+ CheckNeedsToInformIncrementalMarker( |
+ masm, kReturnOnNoNeedToInformIncrementalMarker, mode); |
+ InformIncrementalMarker(masm, mode); |
+ regs_.Restore(masm); |
+ __ ret(0); |
+} |
+ |
+ |
+void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) { |
+ regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_); |
+#ifdef _WIN64 |
+ Register arg3 = r8; |
+ Register arg2 = rdx; |
+ Register arg1 = rcx; |
+#else |
+ Register arg3 = rdx; |
+ Register arg2 = rsi; |
+ Register arg1 = rdi; |
+#endif |
+ Register address = |
+ arg1.is(regs_.address()) ? kScratchRegister : regs_.address(); |
+ ASSERT(!address.is(regs_.object())); |
+ ASSERT(!address.is(arg1)); |
+ __ Move(address, regs_.address()); |
+ __ Move(arg1, regs_.object()); |
+ if (mode == INCREMENTAL_COMPACTION) { |
+    // TODO(gc): Can we just set arg2 to the address at the beginning? |
+ __ Move(arg2, address); |
+ } else { |
+ ASSERT(mode == INCREMENTAL); |
+ __ movq(arg2, Operand(address, 0)); |
+ } |
+ __ LoadAddress(arg3, ExternalReference::isolate_address()); |
+ int argument_count = 3; |
+ |
+ AllowExternalCallThatCantCauseGC scope(masm); |
+ __ PrepareCallCFunction(argument_count); |
+ if (mode == INCREMENTAL_COMPACTION) { |
+ __ CallCFunction( |
+ ExternalReference::incremental_evacuation_record_write_function( |
+ masm->isolate()), |
+ argument_count); |
+ } else { |
+ ASSERT(mode == INCREMENTAL); |
+ __ CallCFunction( |
+ ExternalReference::incremental_marking_record_write_function( |
+ masm->isolate()), |
+ argument_count); |
+ } |
+ regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_); |
+} |
+ |
+ |
+void RecordWriteStub::CheckNeedsToInformIncrementalMarker( |
+ MacroAssembler* masm, |
+ OnNoNeedToInformIncrementalMarker on_no_need, |
+ Mode mode) { |
+ Label on_black; |
+ Label need_incremental; |
+ Label need_incremental_pop_object; |
+ |
+ // Let's look at the color of the object: If it is not black we don't have |
+ // to inform the incremental marker. |
+ __ JumpIfBlack(regs_.object(), |
+ regs_.scratch0(), |
+ regs_.scratch1(), |
+ &on_black, |
+ Label::kNear); |
+ |
+ regs_.Restore(masm); |
+ if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
+ __ RememberedSetHelper(object_, |
+ address_, |
+ value_, |
+ save_fp_regs_mode_, |
+ MacroAssembler::kReturnAtEnd); |
+ } else { |
+ __ ret(0); |
+ } |
+ |
+ __ bind(&on_black); |
+ |
+ // Get the value from the slot. |
+ __ movq(regs_.scratch0(), Operand(regs_.address(), 0)); |
+ |
+ if (mode == INCREMENTAL_COMPACTION) { |
+ Label ensure_not_white; |
+ |
+ __ CheckPageFlag(regs_.scratch0(), // Contains value. |
+ regs_.scratch1(), // Scratch. |
+ MemoryChunk::kEvacuationCandidateMask, |
+ zero, |
+ &ensure_not_white, |
+ Label::kNear); |
+ |
+ __ CheckPageFlag(regs_.object(), |
+ regs_.scratch1(), // Scratch. |
+ MemoryChunk::kSkipEvacuationSlotsRecordingMask, |
+ zero, |
+ &need_incremental); |
+ |
+ __ bind(&ensure_not_white); |
+ } |
+ |
+ // We need an extra register for this, so we push the object register |
+ // temporarily. |
+ __ push(regs_.object()); |
+ __ EnsureNotWhite(regs_.scratch0(), // The value. |
+ regs_.scratch1(), // Scratch. |
+ regs_.object(), // Scratch. |
+ &need_incremental_pop_object, |
+ Label::kNear); |
+ __ pop(regs_.object()); |
+ |
+ regs_.Restore(masm); |
+ if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) { |
+ __ RememberedSetHelper(object_, |
+ address_, |
+ value_, |
+ save_fp_regs_mode_, |
+ MacroAssembler::kReturnAtEnd); |
+ } else { |
+ __ ret(0); |
+ } |
+ |
+ __ bind(&need_incremental_pop_object); |
+ __ pop(regs_.object()); |
+ |
+ __ bind(&need_incremental); |
+ |
+ // Fall through when we need to inform the incremental marker. |
+} |
+ |
+ |
#undef __ |
} } // namespace v8::internal |