Chromium Code Reviews
| Index: runtime/vm/assembler_x64.cc |
| diff --git a/runtime/vm/assembler_x64.cc b/runtime/vm/assembler_x64.cc |
| index 02f85df28f4f56b83919f123968bdf7f207599e1..3534cf6c867376a48d4b3b66d4744bef612e25ae 100644 |
| --- a/runtime/vm/assembler_x64.cc |
| +++ b/runtime/vm/assembler_x64.cc |
| @@ -44,15 +44,15 @@ Assembler::Assembler(bool use_far_branches) |
| const Smi& vacant = Smi::Handle(Smi::New(0xfa >> kSmiTagShift)); |
| StubCode* stub_code = isolate->stub_code(); |
| if (stub_code->UpdateStoreBuffer_entry() != NULL) { |
| - object_pool_wrapper_.AddExternalLabel( |
| - &stub_code->UpdateStoreBufferLabel(), kNotPatchable); |
| + object_pool_wrapper_.AddObject( |
| + Code::Handle(stub_code->UpdateStoreBufferCode()), kNotPatchable); |
srdjan, 2015/06/18 21:29:46: Code::Handle(isolate,
Florian Schneider, 2015/06/29 14:50:24: Done.
| } else { |
| object_pool_wrapper_.AddObject(vacant); |
| } |
| if (stub_code->CallToRuntime_entry() != NULL) { |
| - object_pool_wrapper_.AddExternalLabel( |
| - &stub_code->CallToRuntimeLabel(), kNotPatchable); |
| + object_pool_wrapper_.AddObject( |
| + Code::Handle(stub_code->CallToRuntimeCode()), kNotPatchable); |
srdjan, 2015/06/18 21:29:46: Code::Handle(isolate,
Florian Schneider, 2015/06/29 14:50:24: Done.
| } else { |
| object_pool_wrapper_.AddObject(vacant); |
| } |
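Note on the two hunks above: the assembler now seeds its object pool with the stubs' Code objects rather than raw entry addresses, keeping a vacant Smi in the slot when a stub has not been generated yet. Given the review exchange, the landed form presumably uses the isolate-scoped handle constructor; a minimal sketch of that variant (an assumption based on the comments, not the exact committed code):

    // Sketch only: isolate-scoped handle, as suggested in the review above.
    if (stub_code->UpdateStoreBuffer_entry() != NULL) {
      object_pool_wrapper_.AddObject(
          Code::Handle(isolate, stub_code->UpdateStoreBufferCode()),
          kNotPatchable);
    } else {
      object_pool_wrapper_.AddObject(vacant);
    }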
| @@ -111,23 +111,29 @@ void Assembler::call(const ExternalLabel* label) { |
| } |
| -void Assembler::CallPatchable(const ExternalLabel* label) { |
| +void Assembler::CallPatchable(const Code& target) { |
| ASSERT(allow_constant_pool()); |
| intptr_t call_start = buffer_.GetPosition(); |
| const int32_t offset = ObjectPool::element_offset( |
| - object_pool_wrapper_.FindExternalLabel(label, kPatchable)); |
| - call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag)); |
| + object_pool_wrapper_.FindObject(target, kPatchable)); |
| + LoadWordFromPoolOffset(CODE_REG, PP, offset - kHeapObjectTag); |
| + movq(TMP, FieldAddress(CODE_REG, Code::instructions_offset())); |
| + addq(TMP, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
| + call(TMP); |
| ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize); |
| } |
| -void Assembler::Call(const ExternalLabel* label, Register pp) { |
| +void Assembler::Call(const Code& target, Register pp) { |
| if (Isolate::Current() == Dart::vm_isolate()) { |
| - call(label); |
| + UNREACHABLE(); |
| } else { |
| const int32_t offset = ObjectPool::element_offset( |
| - object_pool_wrapper_.FindExternalLabel(label, kNotPatchable)); |
| - call(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); |
| + object_pool_wrapper_.FindObject(target, kNotPatchable)); |
| + LoadWordFromPoolOffset(CODE_REG, PP, offset - kHeapObjectTag); |
| + movq(TMP, FieldAddress(CODE_REG, Code::instructions_offset())); |
| + addq(TMP, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
| + call(TMP); |
| } |
| } |
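CallPatchable and Call now share one pattern: fetch the target Code object from the object pool into CODE_REG, read its Instructions object, and turn that into the untagged entry address for the indirect call. Loading the target into CODE_REG (rather than only a scratch register) matters because the callee's prologue saves CODE_REG as its pc marker and can derive its pool pointer from it, as the EnterDartFrame/LoadPoolPointer hunks further down show. A rough sketch of the repeated three-instruction tail as a standalone helper (hypothetical, not part of this CL):

    // Hypothetical helper: compute the untagged entry point of the tagged
    // Code object in |code| into |entry|.
    static void LoadCodeEntry(Assembler* assembler, Register entry,
                              Register code) {
      // Tagged Instructions object of the target code.
      assembler->movq(entry, FieldAddress(code, Code::instructions_offset()));
      // Skip the Instructions header and strip the heap-object tag to reach
      // the first executable byte.
      assembler->addq(
          entry, Immediate(Instructions::HeaderSize() - kHeapObjectTag));
    }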
| @@ -2557,25 +2563,6 @@ void Assembler::j(Condition condition, Label* label, bool near) { |
| } |
| -void Assembler::j(Condition condition, const ExternalLabel* label) { |
| - Label no_jump; |
| - // Negate condition. |
| - j(static_cast<Condition>(condition ^ 1), &no_jump, Assembler::kNearJump); |
| - jmp(label); |
| - Bind(&no_jump); |
| -} |
| - |
| - |
| -void Assembler::J(Condition condition, const ExternalLabel* label, |
| - Register pp) { |
| - Label no_jump; |
| - // Negate condition. |
| - j(static_cast<Condition>(condition ^ 1), &no_jump, Assembler::kNearJump); |
| - Jmp(label, pp); |
| - Bind(&no_jump); |
| -} |
| - |
| - |
| void Assembler::jmp(Register reg) { |
| AssemblerBuffer::EnsureCapacity ensured(&buffer_); |
| Operand operand(reg); |
| @@ -2631,21 +2618,36 @@ void Assembler::jmp(const ExternalLabel* label) { |
| } |
| -void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) { |
| +void Assembler::JmpPatchable(const Code& target, Register pp) { |
| ASSERT(allow_constant_pool()); |
| intptr_t call_start = buffer_.GetPosition(); |
| const int32_t offset = ObjectPool::element_offset( |
| - object_pool_wrapper_.FindExternalLabel(label, kPatchable)); |
| - // Patchable jumps always use a 32-bit immediate encoding. |
| - jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag)); |
| + object_pool_wrapper_.FindObject(target, kPatchable)); |
| + LoadWordFromPoolOffset(CODE_REG, pp, offset - kHeapObjectTag); |
| + movq(TMP, FieldAddress(CODE_REG, Code::instructions_offset())); |
| + addq(TMP, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
| + jmp(TMP); |
| ASSERT((buffer_.GetPosition() - call_start) == JumpPattern::kLengthInBytes); |
| } |
| -void Assembler::Jmp(const ExternalLabel* label, Register pp) { |
| +void Assembler::Jmp(const Code& target, Register pp) { |
| const int32_t offset = ObjectPool::element_offset( |
| - object_pool_wrapper_.FindExternalLabel(label, kNotPatchable)); |
| - jmp(Address(pp, offset - kHeapObjectTag)); |
| + object_pool_wrapper_.FindObject(target, kNotPatchable)); |
| + movq(CODE_REG, FieldAddress(pp, offset)); |
| + movq(TMP, FieldAddress(CODE_REG, Code::instructions_offset())); |
| + addq(TMP, Immediate(Instructions::HeaderSize() - kHeapObjectTag)); |
| + jmp(TMP); |
| +} |
| + |
| + |
| +void Assembler::J(Condition condition, |
| + const Code& target, |
| + Register pp) { |
srdjan, 2015/06/18 21:29:47: All arguments on one line
Florian Schneider, 2015/06/29 14:50:24: Done.
| + Label no_jump; |
| + j(static_cast<Condition>(condition ^ 1), &no_jump); // Negate condition. |
| + Jmp(target, pp); |
| + Bind(&no_jump); |
| } |
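A note on the jump hunks: the deleted comment pointed out that patchable jumps always use a 32-bit immediate encoding, which is presumably why JmpPatchable still goes through LoadWordFromPoolOffset(..., offset - kHeapObjectTag) while the non-patchable Jmp can use ordinary field addressing. Assuming FieldAddress subtracts kHeapObjectTag internally (as its other uses in this file imply), both forms read the same pool slot:

    // Equivalent reads of pool slot |offset| (assuming FieldAddress strips
    // kHeapObjectTag itself); only the encoding constraints differ.
    movq(CODE_REG, FieldAddress(pp, offset));
    LoadWordFromPoolOffset(CODE_REG, pp, offset - kHeapObjectTag);

The conditional J is simply rebuilt on top of Jmp by negating the condition and branching around the far jump, replacing the two deleted ExternalLabel overloads.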
| @@ -3082,7 +3084,7 @@ void Assembler::StoreIntoObject(Register object, |
| movq(RDX, object); |
| } |
| StubCode* stub_code = Isolate::Current()->stub_code(); |
| - Call(&stub_code->UpdateStoreBufferLabel(), PP); |
| + Call(Code::Handle(stub_code->UpdateStoreBufferCode()), PP); |
| if (value != RDX) popq(RDX); |
| Bind(&done); |
| } |
| @@ -3392,52 +3394,84 @@ void Assembler::CallRuntime(const RuntimeEntry& entry, |
| } |
| +void Assembler::RestoreCodePointer() { |
| + movq(CODE_REG, Address(RBP, kPcMarkerSlotFromFp * kWordSize)); |
| +} |
| + |
| + |
| void Assembler::LoadPoolPointer(Register pp) { |
| // Load new pool pointer. |
| - const intptr_t kRIPRelativeMovqSize = 7; |
| - const intptr_t entry_to_rip_offset = CodeSize() + kRIPRelativeMovqSize; |
| - const intptr_t object_pool_pc_dist = |
| - Instructions::HeaderSize() - Instructions::object_pool_offset(); |
| - movq(pp, Address::AddressRIPRelative( |
| - -entry_to_rip_offset - object_pool_pc_dist)); |
| - ASSERT(CodeSize() == entry_to_rip_offset); |
| + CheckCodePointer(); |
| + movq(pp, FieldAddress(CODE_REG, Code::object_pool_offset())); |
| } |
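RestoreCodePointer exists because a call now loads the callee's Code object into CODE_REG: a function that needs its own Code object afterwards (or a pool pointer derived from it) must reload it from the frame slot where its prologue saved it. EnterOsrFrame below uses exactly this pairing; a minimal sketch:

    // Reload the current function's Code object and pool pointer after a
    // call has clobbered CODE_REG (the pairing used by EnterOsrFrame below).
    RestoreCodePointer();  // CODE_REG = [RBP + kPcMarkerSlotFromFp * kWordSize]
    movq(PP, FieldAddress(CODE_REG, Code::object_pool_offset()));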
| -void Assembler::EnterDartFrameWithInfo(intptr_t frame_size, |
| - Register new_pp, |
| - Register pc_marker_override) { |
| +void Assembler::EnterDartFrame(intptr_t frame_size, Register new_pp) { |
| + CheckCodePointer(); |
| EnterFrame(0); |
| - pushq(pc_marker_override); |
| + pushq(CODE_REG); |
| pushq(PP); |
| - movq(PP, new_pp); |
| + if (new_pp == kNoRegister) { |
| + LoadPoolPointer(PP); |
| + } else { |
| + movq(PP, new_pp); |
| + } |
| if (frame_size != 0) { |
| subq(RSP, Immediate(frame_size)); |
| } |
| } |
| -void Assembler::LeaveDartFrame() { |
| +void Assembler::LeaveDartFrame(RestorePP restore_pp) { |
| // Restore caller's PP register that was pushed in EnterDartFrame. |
| - movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize))); |
| + if (restore_pp == kRestoreCallerPP) { |
| + movq(PP, Address(RBP, (kSavedCallerPpSlotFromFp * kWordSize))); |
| + } |
| LeaveFrame(); |
| } |
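With CODE_REG pushed where the pc marker used to go, the frame set up by EnterDartFrame looks roughly like this (slot constants assumed to keep values matching the pushes above, i.e. kPcMarkerSlotFromFp = -1 and kSavedCallerPpSlotFromFp = -2):

    // [RBP + 1*kWordSize]  return address
    // [RBP + 0]            caller's RBP
    // [RBP - 1*kWordSize]  this function's Code object (pc marker slot)
    // [RBP - 2*kWordSize]  caller's PP
    // [RBP - ...]          frame_size bytes of spill slots

When no new_pp register is supplied, the prologue now derives PP from CODE_REG via LoadPoolPointer instead of relying on the caller to pass it in.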
| +void Assembler::CheckCodePointer() { |
| +#ifdef DEBUG |
| + Label cid_ok, instructions_ok; |
| + pushq(RAX); |
| + LoadClassId(RAX, CODE_REG); |
| + cmpq(RAX, Immediate(kCodeCid)); |
| + j(EQUAL, &cid_ok); |
| + int3(); |
| + Bind(&cid_ok); |
| + { |
| + const intptr_t kRIPRelativeLeaqSize = 7; |
| + const intptr_t header_to_entry_offset = |
| + (Instructions::HeaderSize() - kHeapObjectTag); |
| + const intptr_t header_to_rip_offset = |
| + CodeSize() + kRIPRelativeLeaqSize + header_to_entry_offset; |
| + leaq(RAX, Address::AddressRIPRelative(-header_to_rip_offset)); |
| + ASSERT(CodeSize() == (header_to_rip_offset - header_to_entry_offset)); |
| + } |
| + cmpq(RAX, FieldAddress(CODE_REG, Code::instructions_offset())); |
| + j(EQUAL, &instructions_ok); |
| + int3(); |
| + Bind(&instructions_ok); |
| + popq(RAX); |
| +#endif |
| +} |
| + |
| + |
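CheckCodePointer is a DEBUG-only consistency check of the new convention: it verifies that CODE_REG holds a Code object (class-id compare, else int3) and that this Code object's instructions are the ones currently executing. The second check reconstructs the current tagged Instructions pointer from RIP; the arithmetic, spelled out:

    // At the leaq, RIP = entry + CodeSize-before-leaq + kRIPRelativeLeaqSize,
    // with entry = instructions_start + Instructions::HeaderSize().
    // Subtracting header_to_rip_offset
    //     = CodeSize-before-leaq + kRIPRelativeLeaqSize
    //       + (Instructions::HeaderSize() - kHeapObjectTag)
    // leaves RAX = instructions_start + kHeapObjectTag, i.e. the tagged
    // Instructions pointer, which must equal the Code object's instructions_.
    // The ASSERT afterwards confirms the leaq really was 7 bytes long.

If either check fails, the int3 traps immediately, which is far easier to debug than silently running with a stale code or pool pointer.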
| // On entry to a function compiled for OSR, the caller's frame pointer, the |
| // stack locals, and any copied parameters are already in place. The frame |
| // pointer is already set up. The PC marker is not correct for the |
| // optimized function and there may be extra space for spill slots to |
| // allocate. |
| -void Assembler::EnterOsrFrame(intptr_t extra_size, |
| - Register new_pp, |
| - Register pc_marker_override) { |
| +void Assembler::EnterOsrFrame(intptr_t extra_size) { |
| if (prologue_offset_ == -1) { |
| Comment("PrologueOffset = %" Pd "", CodeSize()); |
| prologue_offset_ = CodeSize(); |
| } |
| - movq(Address(RBP, kPcMarkerSlotFromFp * kWordSize), pc_marker_override); |
| - movq(PP, new_pp); |
| + RestoreCodePointer(); |
| + movq(PP, FieldAddress(CODE_REG, Code::object_pool_offset())); |
| + |
| + CheckCodePointer(); |
| if (extra_size != 0) { |
| subq(RSP, Immediate(extra_size)); |
| } |
| @@ -3445,8 +3479,9 @@ void Assembler::EnterOsrFrame(intptr_t extra_size, |
| void Assembler::EnterStubFrame() { |
| + CheckCodePointer(); |
| EnterFrame(0); |
| - pushq(Immediate(0)); // Push 0 in the saved PC area for stub frames. |
| + pushq(CODE_REG); |
| pushq(PP); // Save caller's pool pointer |
| LoadPoolPointer(PP); |
| } |
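EnterStubFrame now saves the stub's CODE_REG in the slot that previously just held 0, so stub frames carry a real pc marker like Dart frames do. The matching epilogue is not part of this hunk; presumably it can simply reuse LeaveDartFrame, since the stub frame pushes CODE_REG and PP into the same slots (a guess, sketched for illustration):

    // Sketch only; the actual LeaveStubFrame is not shown in this review.
    void Assembler::LeaveStubFrame() {
      LeaveDartFrame();  // restores the caller's PP, then pops the frame
    }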