Chromium Code Reviews

Index: runtime/vm/assembler_x64.cc
===================================================================
--- runtime/vm/assembler_x64.cc	(revision 27208)
+++ runtime/vm/assembler_x64.cc	(working copy)
@@ -65,6 +65,37 @@
 #undef __

+Assembler::Assembler(bool use_far_branches)
+    : buffer_(),
+      object_pool_(GrowableObjectArray::Handle()),
+      prologue_offset_(-1),
+      comments_() {
+  // Far branching mode is only needed and implemented for MIPS and ARM.
+  ASSERT(!use_far_branches);
+  if (Isolate::Current() != Dart::vm_isolate()) {
+    object_pool_ = GrowableObjectArray::New(Heap::kOld);
+
+    // These objects and labels need to be accessible through every pool
+    // pointer at the same index.
+    object_pool_.Add(Object::Handle(), Heap::kOld);
+    object_pool_.Add(Bool::True(), Heap::kOld);
+    object_pool_.Add(Bool::False(), Heap::kOld);
+
+    if (StubCode::UpdateStoreBuffer_entry() != NULL) {
+      FindExternalLabel(&StubCode::UpdateStoreBufferLabel(), kNotPatchable);
+    } else {
+      object_pool_.Add(Object::Handle(), Heap::kOld);
+    }
+
+    if (StubCode::CallToRuntime_entry() != NULL) {
+      FindExternalLabel(&StubCode::CallToRuntimeLabel(), kNotPatchable);
+    } else {
+      object_pool_.Add(Object::Handle(), Heap::kOld);
+    }
+  }
+}
+
+
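Note: after this constructor runs, every object pool outside the VM isolate begins with the same five entries, so generated code can address them at fixed indices through any pool pointer. A sketch of that invariant (the enum and its names are illustrative, not part of this CL):

    // Hypothetical names for the slots every pool is seeded with.
    enum FixedPoolSlots {
      kNullSlot = 0,               // Object::Handle() (null sentinel)
      kTrueSlot = 1,               // Bool::True()
      kFalseSlot = 2,              // Bool::False()
      kUpdateStoreBufferSlot = 3,  // stub entry, or a null placeholder
      kCallToRuntimeSlot = 4,      // stub entry, or a null placeholder
    };
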
 void Assembler::InitializeMemoryWithBreakpoints(uword data, int length) {
   memset(reinterpret_cast<void*>(data), Instr::kBreakPointInstruction, length);
 }

@@ -95,9 +126,18 @@
 }

+void Assembler::LoadExternalLabel(Register dst,
+                                  const ExternalLabel* label,
+                                  Patchability patchable,
+                                  Register pp) {
+  const int32_t offset =
+      Array::element_offset(FindExternalLabel(label, patchable));
+  LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag);
+}
+
+
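Note: the `offset - kHeapObjectTag` here (and in LoadObjectFromPool below) compensates for PP holding a *tagged* heap pointer. A standalone sketch of the arithmetic, assuming kHeapObjectTag == 1 on x64 and using an illustrative stand-in value for Array::data_offset():

    #include <cstdint>
    #include <cstdio>

    int main() {
      const int64_t kHeapObjectTag = 1;  // assumed Dart VM heap-object tag
      const int64_t kWordSize = 8;       // x64
      const int64_t kDataOffset = 24;    // stand-in for Array::data_offset()

      auto element_offset = [=](int64_t i) {
        return kDataOffset + i * kWordSize;
      };

      // PP holds a tagged pointer: the pool array's address plus the tag.
      const int64_t pool_address = 0x10000;              // illustrative
      const int64_t pp = pool_address + kHeapObjectTag;  // register contents

      // [pp + element_offset(i) - kHeapObjectTag] cancels the tag and lands
      // exactly on the i-th pool slot.
      const int64_t i = 4;
      const int64_t effective = pp + element_offset(i) - kHeapObjectTag;
      printf("%s\n",
             effective == pool_address + element_offset(i) ? "ok" : "bug");
      return 0;
    }
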
 void Assembler::call(const ExternalLabel* label) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  intptr_t call_start = buffer_.GetPosition();

   // Encode movq(TMP, Immediate(label->address())), but always as imm64.
   EmitRegisterREX(TMP, REX_W);
@@ -109,11 +149,41 @@
   EmitOperandREX(2, operand, REX_NONE);
   EmitUint8(0xFF);
   EmitOperand(2, operand);
+}
+
+
+void Assembler::CallPatchable(const ExternalLabel* label) {
+  intptr_t call_start = buffer_.GetPosition();
+  LoadExternalLabel(TMP, label, kPatchable, PP);
+  {
    -- Florian Schneider, 2013/09/06 09:58:15:
       call(TMP)

    -- zra, 2013/09/06 17:53:26:
       Done.
+    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
+    // Encode call(TMP).
+    Operand operand(TMP);
+    EmitOperandREX(2, operand, REX_NONE);
+    EmitUint8(0xFF);
+    EmitOperand(2, operand);
+  }
   ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
 }
+
+
+void Assembler::Call(const ExternalLabel* label, Register pp) {
+  if (Isolate::Current() == Dart::vm_isolate()) {
+    call(label);
+  } else {
+    LoadExternalLabel(TMP, label, kNotPatchable, pp);
+    {
    -- Florian Schneider, 2013/09/06 09:58:15:
       call(TMP)

    -- zra, 2013/09/06 17:53:26:
       Done.
+      AssemblerBuffer::EnsureCapacity ensured(&buffer_);
+      // Encode call(TMP).
+      Operand operand(TMP);
+      EmitOperandREX(2, operand, REX_NONE);
+      EmitUint8(0xFF);
+      EmitOperand(2, operand);
+    }
+  }
+}
+
+
 void Assembler::pushq(Register reg) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
   EmitRegisterREX(reg, REX_NONE);
@@ -1960,6 +2030,15 @@
 }

+void Assembler::J(Condition condition, const ExternalLabel* label,
+                  Register pp) {
+  Label no_jump;
+  j(static_cast<Condition>(condition ^ 1), &no_jump);  // Negate condition.
+  Jmp(label, pp);
+  Bind(&no_jump);
+}
+
+
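Note: `condition ^ 1` works because x86 encodes each condition and its complement as adjacent codes differing only in the least significant bit (EQUAL is 0x4, NOT_EQUAL is 0x5, and so on). A standalone check of that property; the enum values mirror the standard Jcc condition encodings:

    #include <cassert>

    // Standard x86 condition codes (low nibble of the 0F 8x Jcc opcodes).
    enum Condition {
      kOverflow = 0,    kNoOverflow = 1,
      kBelow = 2,       kAboveEqual = 3,
      kEqual = 4,       kNotEqual = 5,
      kBelowEqual = 6,  kAbove = 7,
      kSign = 8,        kNotSign = 9,
      kParityEven = 10, kParityOdd = 11,
      kLess = 12,       kGreaterEqual = 13,
      kLessEqual = 14,  kGreater = 15
    };

    int main() {
      // XOR-ing with 1 flips any condition to its complement.
      assert((kEqual ^ 1) == kNotEqual);
      assert((kLess ^ 1) == kGreaterEqual);
      assert((kBelowEqual ^ 1) == kAbove);
      assert((kOverflow ^ 1) == kNoOverflow);
      return 0;
    }
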
 void Assembler::jmp(Register reg) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
   Operand operand(reg);
@@ -1995,7 +2074,6 @@
 void Assembler::jmp(const ExternalLabel* label) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  intptr_t call_start = buffer_.GetPosition();

   // Encode movq(TMP, Immediate(label->address())), but always as imm64.
   EmitRegisterREX(TMP, REX_W);
@@ -2007,11 +2085,37 @@
   EmitOperandREX(4, operand, REX_NONE);
   EmitUint8(0xFF);
   EmitOperand(4, operand);
+}
+
+
+void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) {
+  intptr_t call_start = buffer_.GetPosition();
+  LoadExternalLabel(TMP, label, kPatchable, pp);
+  {
+    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
+    // Encode jmp(TMP).
    -- Florian Schneider, 2013/09/06 09:58:15:
       jmp(TMP)

    -- zra, 2013/09/06 17:53:26:
       Done.
+    Operand operand(TMP);
+    EmitOperandREX(4, operand, REX_NONE);
+    EmitUint8(0xFF);
+    EmitOperand(4, operand);
+  }
   ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
 }

+void Assembler::Jmp(const ExternalLabel* label, Register pp) {
+  LoadExternalLabel(TMP, label, kNotPatchable, pp);
+  {
    -- Florian Schneider, 2013/09/06 09:58:15:
       jmp(TMP)

    -- zra, 2013/09/06 17:53:26:
       Done.
+    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
+    // Encode jmp(TMP).
+    Operand operand(TMP);
+    EmitOperandREX(4, operand, REX_NONE);
+    EmitUint8(0xFF);
+    EmitOperand(4, operand);
+  }
+}
+
+
 void Assembler::lock() {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
   EmitUint8(0xF0);
@@ -2091,49 +2195,110 @@
 }

-void Assembler::LoadObject(Register dst, const Object& object) {
-  if (object.IsSmi() || object.InVMHeap()) {
+intptr_t Assembler::FindObject(const Object& obj) {
+  // The object pool cannot be used in the vm isolate.
+  ASSERT(Isolate::Current() != Dart::vm_isolate());
+  ASSERT(obj.IsNotTemporaryScopedHandle());
+  ASSERT(obj.IsOld());
+  ASSERT(!object_pool_.IsNull());
+  // TODO(zra): This can be slow. Add a hash map from obj.raw() to
+  // object pool indexes to speed lookup.
+  for (int i = 0; i < object_pool_.Length(); i++) {
+    if (object_pool_.At(i) == obj.raw()) {
+      return i;
+    }
+  }
+  object_pool_.Add(obj, Heap::kOld);
+  return object_pool_.Length() - 1;
+}
+
+
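Note: the TODO above (and srdjan's comment below) point at the linear scan. One possible shape for the suggested hash map, sketched with standard containers; the class name and key type are hypothetical, and a real version would have to cope with the GC moving objects, since obj.raw() values are only stable for old-space objects between collections:

    #include <cstdint>
    #include <unordered_map>
    #include <vector>

    using RawPtr = uintptr_t;  // hypothetical stand-in for RawObject*

    class PoolIndexCache {
     public:
      // Returns the existing pool index for raw, or appends it and returns
      // the new index: one hash lookup instead of a linear scan.
      intptr_t FindOrAdd(RawPtr raw) {
        auto it = index_.find(raw);
        if (it != index_.end()) return it->second;
        pool_.push_back(raw);
        const intptr_t idx = static_cast<intptr_t>(pool_.size()) - 1;
        index_.emplace(raw, idx);
        return idx;
      }

     private:
      std::vector<RawPtr> pool_;                    // mirrors object_pool_
      std::unordered_map<RawPtr, intptr_t> index_;  // raw pointer -> index
    };
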
+intptr_t Assembler::FindExternalLabel(const ExternalLabel* label,
+                                          Patchability patchable) {
    -- srdjan, 2013/09/06 16:22:44:
       fix indent

    -- zra, 2013/09/06 17:53:26:
       Done.
+  // The object pool cannot be used in the vm isolate.
+  ASSERT(Isolate::Current() != Dart::vm_isolate());
+  ASSERT(!object_pool_.IsNull());
+  const uword address = label->address();
+  ASSERT(Utils::IsAligned(address, 4));
+  // The address is stored in the object array as a RawSmi.
+  const Smi& smi = Smi::Handle(reinterpret_cast<RawSmi*>(address));
+  if (patchable == kNotPatchable) {
+    // An external label used in a non-patchable call shouldn't also be used
+    // in patchable calls. So, we can reuse existing entries for
+    // non-patchable calls.
    -- Florian Schneider, 2013/09/06 09:58:15:
       I find the whole approach a little brittle: Forget [...]

    -- zra, 2013/09/06 17:53:26:
       It is equivalent to check that a patchable externa [...]
+    // TODO(zra): This can be slow. Add a hash map from obj.raw() to
+    // object pool indexes to speed lookup.
+    for (int i = 0; i < object_pool_.Length(); i++) {
+      if (object_pool_.At(i) == smi.raw()) {
+        return i;
+      }
+    }
    -- srdjan, 2013/09/06 16:22:44:
       Could you replace this code with call to Assembler [...]

    -- zra, 2013/09/06 17:53:26:
       Done.
+  }
+  // If the call is patchable, do not reuse an existing entry since each
+  // reference may be patched independently.
+  object_pool_.Add(smi, Heap::kOld);
+  return object_pool_.Length() - 1;
+}
+
+
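Note: storing the address as a RawSmi is sound because of pointer tagging: Dart Smis carry their payload shifted left with a zero low bit, and the IsAligned(address, 4) assert above guarantees that bit is already zero. A standalone illustration, assuming the usual one-bit Smi tag:

    #include <cassert>
    #include <cstdint>

    int main() {
      const uintptr_t kSmiTagMask = 1;  // assumed: tag bit 0, zero for Smis

      const uintptr_t address = 0x7f3a00011230;  // illustrative, 4-aligned
      assert((address & 3) == 0);                // mirrors IsAligned(address, 4)

      // The aligned address already looks like a Smi (low bit clear), and
      // decoding (arithmetic shift right) then re-encoding is lossless.
      assert((address & kSmiTagMask) == 0);
      const intptr_t smi_value = static_cast<intptr_t>(address) >> 1;
      assert(static_cast<uintptr_t>(smi_value) << 1 == address);
      return 0;
    }
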
+bool Assembler::CanLoadFromObjectPool(const Object& object) {
+  return !object.IsSmi() &&  // Not a Smi.
+         // Not in the VM heap, OR is one of the VM heap objects we put in
+         // every object pool.
+         (!object.InVMHeap() || (object.raw() == Object::null()) ||
+          (object.raw() == Bool::True().raw()) ||
+          (object.raw() == Bool::False().raw())) &&
+         object.IsNotTemporaryScopedHandle() &&
+         object.IsOld();
+}
+
+
+void Assembler::LoadWordFromPoolOffset(Register dst, Register pp,
+                                       int32_t offset) {
+  // This sequence must be of fixed size. The 'true' argument to the Address
+  // constructor forces it to use a fixed-size encoding.
+  movq(dst, Address(pp, offset, true));
+}
+
+
+void Assembler::LoadObjectFromPool(Register dst, const Object& object,
+                                   Register pp) {
+  if (CanLoadFromObjectPool(object)) {
+    const int32_t offset = Array::element_offset(FindObject(object));
+    LoadWordFromPoolOffset(dst, pp, offset - kHeapObjectTag);
+  } else {
     movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
-  } else {
-    ASSERT(object.IsNotTemporaryScopedHandle());
-    ASSERT(object.IsOld());
-    AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-    EmitRegisterREX(dst, REX_W);
-    EmitUint8(0xB8 | (dst & 7));
-    buffer_.EmitObject(object);
   }
 }

 void Assembler::StoreObject(const Address& dst, const Object& object) {
-  if (object.IsSmi() || object.InVMHeap()) {
-    movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
-  } else {
-    ASSERT(object.IsNotTemporaryScopedHandle());
-    ASSERT(object.IsOld());
+  if (CanLoadFromObjectPool(object)) {
     LoadObject(TMP, object);
     movq(dst, TMP);
+  } else {
+    movq(dst, Immediate(reinterpret_cast<int64_t>(object.raw())));
   }
 }

 void Assembler::PushObject(const Object& object) {
-  if (object.IsSmi() || object.InVMHeap()) {
-    pushq(Immediate(reinterpret_cast<int64_t>(object.raw())));
-  } else {
+  if (CanLoadFromObjectPool(object)) {
     LoadObject(TMP, object);
     pushq(TMP);
+  } else {
+    pushq(Immediate(reinterpret_cast<int64_t>(object.raw())));
   }
 }

 void Assembler::CompareObject(Register reg, const Object& object) {
-  if (object.IsSmi() || object.InVMHeap()) {
-    cmpq(reg, Immediate(reinterpret_cast<int64_t>(object.raw())));
-  } else {
+  if (CanLoadFromObjectPool(object)) {
     ASSERT(reg != TMP);
     LoadObject(TMP, object);
     cmpq(reg, TMP);
+  } else {
+    cmpq(reg, Immediate(reinterpret_cast<int64_t>(object.raw())));
   }
 }

@@ -2196,7 +2361,7 @@
   if (object != RAX) {
     movq(RAX, object);
   }
-  call(&StubCode::UpdateStoreBufferLabel());
+  Call(&StubCode::UpdateStoreBufferLabel(), PP);
   if (value != RAX) popq(RAX);
   Bind(&done);
 }

@@ -2298,6 +2463,23 @@
 }

+void Assembler::LeaveFrameWithPP() {
+  movq(PP, Address(RBP, -2 * kWordSize));
+  LeaveFrame();
+}
+
+
+void Assembler::ReturnPatchable() {
+  // This sequence must have a fixed size so that it can be patched by the
+  // debugger.
+  intptr_t start = buffer_.GetPosition();
+  LeaveFrameWithPP();
+  ret();
+  nop(4);
+  ASSERT((buffer_.GetPosition() - start) == 13);
+}
+
+
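Note: one plausible accounting of the 13 bytes asserted above, assuming PP is R15, LeaveFrame emits movq(RSP, RBP) followed by popq(RBP), and nop(4) emits a 4-byte multi-byte NOP. The per-instruction sizes below are assumptions, not taken from this CL:

    #include <cassert>

    int main() {
      const int kMovPpFromFp = 4;  // movq r15, [rbp - 16]: REX.W 8B /r disp8
      const int kMovRspRbp = 3;    // movq rsp, rbp:        REX.W 89 /r
      const int kPopRbp = 1;       // popq rbp
      const int kRet = 1;          // ret
      const int kNop4 = 4;         // 0F 1F 40 00 style 4-byte nop
      assert(kMovPpFromFp + kMovRspRbp + kPopRbp + kRet + kNop4 == 13);
      return 0;
    }
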
 void Assembler::ReserveAlignedFrameSpace(intptr_t frame_space) {
   // Reserve space for arguments and align frame before entering
   // the C++ world.
@@ -2378,41 +2560,96 @@
 }

+void Assembler::LoadPoolPointer(Register pp) {
+  Label next;
+  call(&next);
+  Bind(&next);
+
+  // Load new pool pointer.
+  const intptr_t object_pool_pc_dist =
+      Instructions::HeaderSize() - Instructions::object_pool_offset() +
+      CodeSize();
+  popq(pp);
+  movq(pp, Address(pp, -object_pool_pc_dist));
+}
+
+
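Note: the call/pop pair materializes the current PC (call pushes its own return address, and the immediately following pop retrieves it). The pool field of the enclosing Instructions object then sits at a fixed negative distance from that PC. A worked check of the distance, with illustrative stand-ins for the two layout constants:

    #include <cassert>
    #include <cstdint>

    int main() {
      // Stand-ins, not the VM's real values.
      const intptr_t kHeaderSize = 32;        // Instructions::HeaderSize()
      const intptr_t kObjectPoolOffset = 16;  // object_pool_offset()

      const intptr_t instructions_start = 0x4000;  // untagged object start
      const intptr_t entry = instructions_start + kHeaderSize;
      const intptr_t code_size = 5;           // CodeSize() at Bind(&next)
      const intptr_t pc = entry + code_size;  // value popq(pp) retrieves

      // object_pool_pc_dist exactly as computed in LoadPoolPointer:
      const intptr_t dist = kHeaderSize - kObjectPoolOffset + code_size;

      // pc - dist lands on the pool field of the Instructions object.
      assert(pc - dist == instructions_start + kObjectPoolOffset);
      return 0;
    }
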
 void Assembler::EnterDartFrame(intptr_t frame_size) {
   EnterFrame(0);
+
   Label dart_entry;
   call(&dart_entry);
   Bind(&dart_entry);
   // The runtime system assumes that the code marker address is
   // kEntryPointToPcMarkerOffset bytes from the entry. If there is any code
   // generated before entering the frame, the address needs to be adjusted.
+  const intptr_t object_pool_pc_dist =
+      Instructions::HeaderSize() - Instructions::object_pool_offset() +
+      CodeSize();
   const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize();
   if (offset != 0) {
     addq(Address(RSP, 0), Immediate(offset));
   }
+  // Save the caller's pool pointer.
+  pushq(PP);
+
+  // Load the callee's pool pointer.
+  movq(PP, Address(RSP, 1 * kWordSize));
+  movq(PP, Address(PP, -object_pool_pc_dist - offset));
+
   if (frame_size != 0) {
     subq(RSP, Immediate(frame_size));
   }
 }
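Note: after this prologue the frame has a fixed shape, which is what LeaveFrameWithPP's [RBP - 2 * kWordSize] load above depends on. A sketch of the slot offsets (the constant names are illustrative, not from the CL):

    #include <cassert>

    int main() {
      const int kWordSize = 8;  // x64
      // Slot offsets from the new RBP after the EnterDartFrame prologue;
      // the return address sits above at [RBP + kWordSize].
      const int kSavedCallerFp = 0 * kWordSize;   // pushed by EnterFrame(0)
      const int kPcMarker = -1 * kWordSize;       // pushed by call(&dart_entry)
      const int kSavedCallerPp = -2 * kWordSize;  // pushed by pushq(PP)

      // LeaveFrameWithPP reads [RBP - 2 * kWordSize]; these constants tie
      // that offset back to the push order above.
      assert(kSavedCallerFp == 0 && kPcMarker == -8 && kSavedCallerPp == -16);
      return 0;
    }
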
+
+
+void Assembler::EnterDartFrameWithInfo(intptr_t frame_size,
+                                       Register new_pp, Register new_pc) {
+  if (new_pc == kNoRegister) {
+    EnterDartFrame(0);
+  } else {
+    EnterFrame(0);
+    pushq(new_pc);
+    pushq(PP);
+    movq(PP, new_pp);
+  }
+  if (frame_size != 0) {
+    subq(RSP, Immediate(frame_size));
+  }
+}
+
+
 // On entry to a function compiled for OSR, the caller's frame pointer, the
 // stack locals, and any copied parameters are already in place. The frame
 // pointer is already set up. The PC marker is not correct for the
 // optimized function and there may be extra space for spill slots to
 // allocate.
-void Assembler::EnterOsrFrame(intptr_t extra_size) {
-  Label dart_entry;
-  call(&dart_entry);
-  Bind(&dart_entry);
-  // The runtime system assumes that the code marker address is
-  // kEntryPointToPcMarkerOffset bytes from the entry. Since there is no
-  // code to set up the frame pointer, the address needs to be adjusted.
-  const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize();
-  if (offset != 0) {
-    addq(Address(RSP, 0), Immediate(offset));
+void Assembler::EnterOsrFrame(intptr_t extra_size,
+                              Register new_pp, Register new_pc) {
+  if (new_pc == kNoRegister) {
+    Label dart_entry;
+    call(&dart_entry);
+    Bind(&dart_entry);
+    // The runtime system assumes that the code marker address is
+    // kEntryPointToPcMarkerOffset bytes from the entry. Since there is no
+    // code to set up the frame pointer, the address needs to be adjusted.
+    const intptr_t object_pool_pc_dist =
+        Instructions::HeaderSize() - Instructions::object_pool_offset() +
+        CodeSize();
+    const intptr_t offset = kEntryPointToPcMarkerOffset - CodeSize();
+    if (offset != 0) {
+      addq(Address(RSP, 0), Immediate(offset));
+    }
+
+    // Load the callee's pool pointer.
+    movq(PP, Address(RSP, 0));
+    movq(PP, Address(PP, -object_pool_pc_dist - offset));
+
+    popq(Address(RBP, kPcMarkerSlotFromFp * kWordSize));
+  } else {
+    movq(Address(RBP, kPcMarkerSlotFromFp * kWordSize), new_pc);
+    movq(PP, new_pp);
   }
-  popq(Address(RBP, kPcMarkerSlotFromFp * kWordSize));
   if (extra_size != 0) {
     subq(RSP, Immediate(extra_size));
   }
 }

@@ -2425,6 +2662,14 @@
 }

+void Assembler::EnterStubFrameWithPP() {
+  EnterFrame(0);
+  pushq(Immediate(0));  // Push 0 in the saved PC area for stub frames.
+  pushq(PP);            // Save the caller's pool pointer.
+  LoadPoolPointer(PP);
+}
+
+
 void Assembler::TryAllocate(const Class& cls,
                             Label* failure,
                             bool near_jump,
@@ -2648,6 +2893,30 @@
 }

+void Assembler::FixedSizeAddressOffsetTest() {
+  movq(RAX, Address(RSP, 0, true));
+  movq(RAX, Address(RBP, 0, true));
+  movq(RAX, Address(RAX, 0, true));
+  movq(RAX, Address(R10, 0, true));
+  movq(RAX, Address(R12, 0, true));
+  movq(RAX, Address(R13, 0, true));
+  movq(R10, Address(RAX, 0, true));
+
+  movq(RAX, Address(RSP, kWordSize, true));
+  movq(RAX, Address(RBP, kWordSize, true));
+  movq(RAX, Address(RAX, kWordSize, true));
+  movq(RAX, Address(R10, kWordSize, true));
+  movq(RAX, Address(R12, kWordSize, true));
+  movq(RAX, Address(R13, kWordSize, true));
+
+  movq(RAX, Address(RSP, -kWordSize, true));
+  movq(RAX, Address(RBP, -kWordSize, true));
+  movq(RAX, Address(RAX, -kWordSize, true));
+  movq(RAX, Address(R10, -kWordSize, true));
+  movq(RAX, Address(R12, -kWordSize, true));
+  movq(RAX, Address(R13, -kWordSize, true));
+}
+
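Note: the registers exercised above are not arbitrary. In x64 ModRM encoding, base registers whose low three bits are 4 (RSP, R12) require a SIB byte, and those whose low three bits are 5 (RBP, R13) cannot be encoded without a displacement, so a fixed-size Address must emit a full disp32 in every case; that is what this test pins down. A standalone check of the two special cases:

    #include <cassert>

    int main() {
      // x64 register numbers.
      const int RSP = 4, RBP = 5, R10 = 10, R12 = 12, R13 = 13;
      auto low3 = [](int reg) { return reg & 7; };

      assert(low3(R12) == low3(RSP));  // both force a SIB byte in ModRM
      assert(low3(R13) == low3(RBP));  // both force an explicit displacement
      assert(low3(R10) != 4 && low3(R10) != 5);  // ordinary case, contrast
      return 0;
    }
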
 }  // namespace dart

 #endif  // defined TARGET_ARCH_X64