| Index: runtime/vm/assembler_x64.cc
|
| diff --git a/runtime/vm/assembler_x64.cc b/runtime/vm/assembler_x64.cc
|
| index e741102573ac360cc02ff22d2f5a288e6994fc3c..fcab5eafb922677ce26acc847fd68ac7d4211bf3 100644
|
| --- a/runtime/vm/assembler_x64.cc
|
| +++ b/runtime/vm/assembler_x64.cc
|
| @@ -81,36 +81,25 @@ void Assembler::call(const ExternalLabel* label) {
|
| }
|
|
|
|
|
| -void Assembler::CallPatchable(const ExternalLabel* label) {
|
| +void Assembler::CallPatchable(const StubEntry& stub_entry) {
|
| ASSERT(constant_pool_allowed());
|
| intptr_t call_start = buffer_.GetPosition();
|
| const int32_t offset = ObjectPool::element_offset(
|
| - object_pool_wrapper_.FindExternalLabel(label, kPatchable));
|
| + object_pool_wrapper_.FindExternalLabel(&stub_entry.label(), kPatchable));
|
| call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag));
|
| ASSERT((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
|
| }
|
|
|
|
|
| -void Assembler::Call(const ExternalLabel* label) {
|
| +void Assembler::Call(const StubEntry& stub_entry) {
|
| ASSERT(constant_pool_allowed());
|
| const int32_t offset = ObjectPool::element_offset(
|
| - object_pool_wrapper_.FindExternalLabel(label, kNotPatchable));
|
| + object_pool_wrapper_.FindExternalLabel(&stub_entry.label(),
|
| + kNotPatchable));
|
| call(Address::AddressBaseImm32(PP, offset - kHeapObjectTag));
|
| }
|
|
|
|
|
| -void Assembler::CallPatchable(const StubEntry& stub_entry) {
|
| - const ExternalLabel label(stub_entry.EntryPoint());
|
| - CallPatchable(&label);
|
| -}
|
| -
|
| -
|
| -void Assembler::Call(const StubEntry& stub_entry) {
|
| - const ExternalLabel label(stub_entry.EntryPoint());
|
| - Call(&label);
|
| -}
|
| -
|
| -
|
| void Assembler::pushq(Register reg) {
|
| AssemblerBuffer::EnsureCapacity ensured(&buffer_);
|
| EmitRegisterREX(reg, REX_NONE);
|
| @@ -2544,32 +2533,16 @@ void Assembler::j(Condition condition, Label* label, bool near) {
|
| }
|
|
|
|
|
| -void Assembler::j(Condition condition, const ExternalLabel* label) {
|
| - Label no_jump;
|
| - // Negate condition.
|
| - j(static_cast<Condition>(condition ^ 1), &no_jump, Assembler::kNearJump);
|
| - jmp(label);
|
| - Bind(&no_jump);
|
| -}
|
| -
|
| -
|
| -void Assembler::J(Condition condition, const ExternalLabel* label,
|
| +void Assembler::J(Condition condition, const StubEntry& stub_entry,
|
| Register pp) {
|
| Label no_jump;
|
| // Negate condition.
|
| j(static_cast<Condition>(condition ^ 1), &no_jump, Assembler::kNearJump);
|
| - Jmp(label, pp);
|
| + Jmp(stub_entry, pp);
|
| Bind(&no_jump);
|
| }
|
|
|
|
|
| -void Assembler::J(Condition condition, const StubEntry& stub_entry,
|
| - Register pp) {
|
| - const ExternalLabel label(stub_entry.EntryPoint());
|
| - J(condition, &label, pp);
|
| -}
|
| -
|
| -
|
| void Assembler::jmp(Register reg) {
|
| AssemblerBuffer::EnsureCapacity ensured(&buffer_);
|
| Operand operand(reg);
|
| @@ -2626,42 +2599,30 @@ void Assembler::jmp(const ExternalLabel* label) {
|
|
|
|
|
| void Assembler::jmp(const StubEntry& stub_entry) {
|
| - const ExternalLabel label(stub_entry.EntryPoint());
|
| - jmp(&label);
|
| + jmp(&stub_entry.label());
|
| }
|
|
|
|
|
| -void Assembler::JmpPatchable(const ExternalLabel* label, Register pp) {
|
| +void Assembler::JmpPatchable(const StubEntry& stub_entry, Register pp) {
|
| ASSERT((pp != PP) || constant_pool_allowed());
|
| intptr_t call_start = buffer_.GetPosition();
|
| const int32_t offset = ObjectPool::element_offset(
|
| - object_pool_wrapper_.FindExternalLabel(label, kPatchable));
|
| + object_pool_wrapper_.FindExternalLabel(&stub_entry.label(), kPatchable));
|
| // Patchable jumps always use a 32-bit immediate encoding.
|
| jmp(Address::AddressBaseImm32(pp, offset - kHeapObjectTag));
|
| ASSERT((buffer_.GetPosition() - call_start) == JumpPattern::kLengthInBytes);
|
| }
|
|
|
|
|
| -void Assembler::JmpPatchable(const StubEntry& stub_entry, Register pp) {
|
| - const ExternalLabel label(stub_entry.EntryPoint());
|
| - JmpPatchable(&label, pp);
|
| -}
|
| -
|
| -
|
| -void Assembler::Jmp(const ExternalLabel* label, Register pp) {
|
| +void Assembler::Jmp(const StubEntry& stub_entry, Register pp) {
|
| ASSERT((pp != PP) || constant_pool_allowed());
|
| const int32_t offset = ObjectPool::element_offset(
|
| - object_pool_wrapper_.FindExternalLabel(label, kNotPatchable));
|
| + object_pool_wrapper_.FindExternalLabel(&stub_entry.label(),
|
| + kNotPatchable));
|
| jmp(Address(pp, offset - kHeapObjectTag));
|
| }
|
|
|
|
|
| -void Assembler::Jmp(const StubEntry& stub_entry, Register pp) {
|
| - const ExternalLabel label(stub_entry.EntryPoint());
|
| - Jmp(&label, pp);
|
| -}
|
| -
|
| -
|
| void Assembler::lock() {
|
| AssemblerBuffer::EnsureCapacity ensured(&buffer_);
|
| EmitUint8(0xF0);
|
| @@ -3356,7 +3317,7 @@ void Assembler::PopRegisters(intptr_t cpu_register_set,
|
|
|
|
|
| void Assembler::EnterCallRuntimeFrame(intptr_t frame_space) {
|
| - EnterFrame(0);
|
| + EnterStubFrame();
|
|
|
| // TODO(vegorov): avoid saving FpuTMP, it is used only as scratch.
|
| PushRegisters(CallingConventions::kVolatileCpuRegisters,
|
| @@ -3376,23 +3337,15 @@ void Assembler::LeaveCallRuntimeFrame() {
|
| RegisterSet::RegisterCount(CallingConventions::kVolatileXmmRegisters);
|
| const intptr_t kPushedRegistersSize =
|
| kPushedCpuRegistersCount * kWordSize +
|
| - kPushedXmmRegistersCount * kFpuRegisterSize;
|
| + kPushedXmmRegistersCount * kFpuRegisterSize +
|
| + 2 * kWordSize; // Account for PP and PC marker pushed by EnterStubFrame.
|
| leaq(RSP, Address(RBP, -kPushedRegistersSize));
|
|
|
| // TODO(vegorov): avoid saving FpuTMP, it is used only as scratch.
|
| PopRegisters(CallingConventions::kVolatileCpuRegisters,
|
| CallingConventions::kVolatileXmmRegisters);
|
|
|
| - leave();
|
| -}
|
| -
|
| -
|
| -void Assembler::CallCFunction(const ExternalLabel* label) {
|
| - // Reserve shadow space for outgoing arguments.
|
| - if (CallingConventions::kShadowSpaceBytes != 0) {
|
| - subq(RSP, Immediate(CallingConventions::kShadowSpaceBytes));
|
| - }
|
| - call(label);
|
| + LeaveStubFrame();
|
| }
|
|
|
|
|
|
|