| Index: src/assembler_ia32.cpp
|
| diff --git a/src/assembler_ia32.cpp b/src/assembler_ia32.cpp
|
| new file mode 100644
|
| index 0000000000000000000000000000000000000000..d1841e840905acfeece7255d32ac44e11b5e0d36
|
| --- /dev/null
|
| +++ b/src/assembler_ia32.cpp
|
| @@ -0,0 +1,2044 @@
|
| +// Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
|
| +// for details. All rights reserved. Use of this source code is governed by a
|
| +// BSD-style license that can be found in the LICENSE file.
|
| +//
|
| +// Modified by the Subzero authors.
|
| +//
|
| +//===- subzero/src/assembler_ia32.cpp - Assembler for x86-32 -------------===//
|
| +//
|
| +// The Subzero Code Generator
|
| +//
|
| +// This file is distributed under the University of Illinois Open Source
|
| +// License. See LICENSE.TXT for details.
|
| +//
|
| +//===----------------------------------------------------------------------===//
|
| +//
|
| +// This file implements the Assembler class for x86-32.
|
| +//
|
| +//===----------------------------------------------------------------------===//
|
| +
|
| +#include "assembler_ia32.h"
|
| +#include "IceMemoryRegion.h"
|
| +
|
| +namespace Ice {
|
| +namespace x86 {
|
| +
|
// Fixup for a 32-bit direct call (E8 rel32): at fixup-processing time it
// rewrites the stored absolute target into the PC-relative displacement the
// encoding requires.
class DirectCallRelocation : public AssemblerFixup {
public:
  // Arena-allocates a fixup in the assembler's memory; the assembler owns it,
  // so there is no matching delete.
  static DirectCallRelocation *create(Assembler *Asm, FixupKind Kind,
                                      const ConstantRelocatable *Sym) {
    return new (Asm->Allocate<DirectCallRelocation>())
        DirectCallRelocation(Kind, Sym);
  }

  // Converts the absolute pointer stored at `position` into a displacement.
  void Process(const MemoryRegion &region, intptr_t position) {
    // Direct calls are relative to the following instruction on x86.
    int32_t pointer = region.Load<int32_t>(position);
    // End address of the 4-byte displacement field == address of the next
    // instruction, which is the base the CPU adds the displacement to.
    int32_t delta = region.start() + position + sizeof(int32_t);
    region.Store<int32_t>(position, pointer - delta);
  }

private:
  DirectCallRelocation(FixupKind Kind, const ConstantRelocatable *Sym)
      : AssemblerFixup(Kind, Sym) {}
};
|
| +
|
// call reg: FF /2 — indirect near call through a general-purpose register.
void AssemblerX86::call(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(2, reg);  // /2 opcode extension in the ModRM reg field.
}

// call m32: FF /2 — indirect near call through memory.
void AssemblerX86::call(const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(2, address);
}

// call rel32 to an assembler-local label: E8 cd.
void AssemblerX86::call(Label *label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE8);
  // Total instruction length (1 opcode + 4 displacement bytes); EmitLabel
  // needs it to compute the rel32 from the end of the instruction.
  static const int kSize = 5;
  EmitLabel(label, kSize);
}

// call rel32 to an external symbol: E8 plus a relocation fixup that later
// rewrites the placeholder into a PC-relative displacement.
void AssemblerX86::call(const ConstantRelocatable *label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  intptr_t call_start = buffer_.GetPosition();
  EmitUint8(0xE8);
  EmitFixup(DirectCallRelocation::create(this, FK_PcRel_4, label));
  // Placeholder of -4 (== minus the size of the displacement field) so that
  // resolving against the symbol address yields the correct rel32.
  EmitInt32(-4);
  assert((buffer_.GetPosition() - call_start) == kCallExternalLabelSize);
}
|
| +
|
// push r32: single-byte 50+rd encoding (register folded into the opcode).
void AssemblerX86::pushl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x50 + reg);
}

// push m32: FF /6.
void AssemblerX86::pushl(const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(6, address);
}

// push imm32: 68 id.
void AssemblerX86::pushl(const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x68);
  EmitImmediate(imm);
}

// pop r32: single-byte 58+rd encoding.
void AssemblerX86::popl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x58 + reg);
}

// pop m32: 8F /0.
void AssemblerX86::popl(const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8F);
  EmitOperand(0, address);
}

// pushad (push all GPRs): 60. 32-bit only; invalid in x86-64.
void AssemblerX86::pushal() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x60);
}

// popad (pop all GPRs): 61. 32-bit only; invalid in x86-64.
void AssemblerX86::popal() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x61);
}
|
| +
|
// setcc r8: 0F 90+cc, ModRM with mod=11 selecting the byte register.
// Writes 1 to dst if `condition` holds, else 0.
void AssemblerX86::setcc(Condition condition, ByteRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x90 + condition);  // Condition code is folded into the opcode.
  EmitUint8(0xC0 + dst);        // mod=11 (register-direct), r/m = dst.
}
|
| +
|
// mov r32, imm32: B8+rd id (register folded into the opcode).
void AssemblerX86::movl(Register dst, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xB8 + dst);
  EmitImmediate(imm);
}

// mov r/m32, r32: 89 /r. Note the operand swap: 0x89 stores the reg-field
// operand (src) into the r/m operand (dst).
void AssemblerX86::movl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitRegisterOperand(src, dst);
}

// mov r32, m32: 8B /r (load form; dst in the reg field).
void AssemblerX86::movl(Register dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8B);
  EmitOperand(dst, src);
}

// mov m32, r32: 89 /r (store form; src in the reg field).
void AssemblerX86::movl(const Address &dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x89);
  EmitOperand(src, dst);
}

// mov m32, imm32: C7 /0 id.
void AssemblerX86::movl(const Address &dst, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC7);
  EmitOperand(0, dst);
  EmitImmediate(imm);
}
|
| +
|
// movzx r32, r8: 0F B6 /r — zero-extending byte load from a register.
void AssemblerX86::movzxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitRegisterOperand(dst, src);
}

// movzx r32, m8: 0F B6 /r — zero-extending byte load from memory.
void AssemblerX86::movzxb(Register dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB6);
  EmitOperand(dst, src);
}

// movsx r32, r8: 0F BE /r — sign-extending byte load from a register.
void AssemblerX86::movsxb(Register dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitRegisterOperand(dst, src);
}

// movsx r32, m8: 0F BE /r — sign-extending byte load from memory.
void AssemblerX86::movsxb(Register dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBE);
  EmitOperand(dst, src);
}
|
| +
|
// Byte loads must specify an extension; a plain 8-bit register load is
// deliberately rejected at runtime.
void AssemblerX86::movb(ByteRegister dst, const Address &src) {
  (void)dst;
  (void)src;
  // FATAL
  llvm_unreachable("Use movzxb or movsxb instead.");
}

// mov m8, r8: 88 /r — byte store to memory.
void AssemblerX86::movb(const Address &dst, ByteRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x88);
  EmitOperand(src, dst);
}

// mov m8, imm8: C6 /0 ib.
void AssemblerX86::movb(const Address &dst, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC6);
  // EAX here is only the numeric value 0, i.e. the /0 opcode extension in
  // the ModRM reg field — not a register operand.
  EmitOperand(EAX, dst);
  assert(imm.is_int8());
  EmitUint8(imm.value() & 0xFF);
}
|
| +
|
// movzx r32, r16: 0F B7 /r — zero-extending 16-bit load from a register.
void AssemblerX86::movzxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitRegisterOperand(dst, src);
}

// movzx r32, m16: 0F B7 /r — zero-extending 16-bit load from memory.
void AssemblerX86::movzxw(Register dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB7);
  EmitOperand(dst, src);
}

// movsx r32, r16: 0F BF /r — sign-extending 16-bit load from a register.
void AssemblerX86::movsxw(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitRegisterOperand(dst, src);
}

// movsx r32, m16: 0F BF /r — sign-extending 16-bit load from memory.
void AssemblerX86::movsxw(Register dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBF);
  EmitOperand(dst, src);
}

// 16-bit register loads must specify an extension; rejected at runtime.
void AssemblerX86::movw(Register dst, const Address &src) {
  (void)dst;
  (void)src;
  // FATAL
  llvm_unreachable("Use movzxw or movsxw instead.");
}

// mov m16, r16: 66 89 /r — operand-size override turns the 32-bit store
// into a 16-bit one.
void AssemblerX86::movw(const Address &dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x89);
  EmitOperand(src, dst);
}
|
| +
|
// lea r32, m: 8D /r — loads the effective address, no memory access.
void AssemblerX86::leal(Register dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x8D);
  EmitOperand(dst, src);
}

// cmovcc r32, r32: 0F 40+cc /r — conditional move (requires P6+).
void AssemblerX86::cmov(Condition cond, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x40 + cond);  // Condition code folded into the opcode.
  EmitRegisterOperand(dst, src);
}

// rep movsb: F3 A4 — copies ECX bytes from [ESI] to [EDI].
void AssemblerX86::rep_movsb() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0xA4);
}
|
| +
|
// movss xmm, m32: F3 0F 10 /r — scalar single-precision load.
void AssemblerX86::movss(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}

// movss m32, xmm: F3 0F 11 /r — scalar single-precision store.
void AssemblerX86::movss(const Address &dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}

// movss xmm, xmm: uses the store form (0x11) with the operands swapped,
// so src lands in the ModRM reg field and dst in r/m — same net effect as
// the load form.
void AssemblerX86::movss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}
|
| +
|
// movd xmm, r32: 66 0F 6E /r — move 32 bits GPR -> low lane of xmm.
void AssemblerX86::movd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x6E);
  EmitOperand(dst, Operand(src));
}

// movd r32, xmm: 66 0F 7E /r — move low 32 bits of xmm -> GPR.
void AssemblerX86::movd(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(src, Operand(dst));  // src (xmm) goes in the ModRM reg field.
}

// movq m64, xmm: 66 0F D6 /r — store low 64 bits of xmm to memory.
void AssemblerX86::movq(const Address &dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xD6);
  EmitOperand(src, Operand(dst));
}

// movq xmm, m64: F3 0F 7E /r — load 64 bits into xmm, zeroing the high half.
void AssemblerX86::movq(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x7E);
  EmitOperand(dst, Operand(src));
}
|
| +
|
| +// TODO(jvoung): In these xmm1, xmm2/mXX cases, the opcodes don't vary
|
| +// based on operand-type. Should we just have an "addss_start()" method,
|
| +// and then the caller can do EmitXmmRegisterOperand(dst, src)
|
| +// or EmitOperand(dst, src)? Then we don't have to fill in the missing
|
| +// variants which only handle Xmm, Xmm (but not Xmm, mXX).
|
| +// Only thing is we'll be diverging, and we'll end up making the
|
| +// EmitXmmRegisterOperand, etc. public instead of private.
|
| +
|
// Scalar single-precision arithmetic: F3 0F <op> /r, where <op> is
// 58=add, 5C=sub, 59=mul, 5E=div. Each has a reg,reg and a reg,mem form.

// addss xmm, xmm: F3 0F 58 /r.
void AssemblerX86::addss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

// addss xmm, m32: F3 0F 58 /r.
void AssemblerX86::addss(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}

// subss xmm, xmm: F3 0F 5C /r.
void AssemblerX86::subss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

// subss xmm, m32: F3 0F 5C /r.
void AssemblerX86::subss(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}

// mulss xmm, xmm: F3 0F 59 /r.
void AssemblerX86::mulss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

// mulss xmm, m32: F3 0F 59 /r.
void AssemblerX86::mulss(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}

// divss xmm, xmm: F3 0F 5E /r.
void AssemblerX86::divss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

// divss xmm, m32: F3 0F 5E /r.
void AssemblerX86::divss(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
|
| +
|
// fld m32fp: D9 /0 — push a single-precision value onto the x87 stack.
void AssemblerX86::flds(const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(0, src);
}

// fstp m32fp: D9 /3 — store ST(0) as single precision and pop the x87 stack.
void AssemblerX86::fstps(const Address &dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(3, dst);
}
|
| +
|
// movsd xmm, m64: F2 0F 10 /r — scalar double-precision load.
void AssemblerX86::movsd(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}

// movsd m64, xmm: F2 0F 11 /r — scalar double-precision store.
void AssemblerX86::movsd(const Address &dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}

// movsd xmm, xmm: uses the store form (0x11) with operands swapped (src in
// the ModRM reg field, dst in r/m), equivalent to the load form.
void AssemblerX86::movsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitXmmRegisterOperand(src, dst);
}
|
| +
|
// movaps xmm, xmm: 0F 28 /r — aligned packed-single register move.
void AssemblerX86::movaps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x28);
  EmitXmmRegisterOperand(dst, src);
}

// movups xmm, m128: 0F 10 /r — unaligned 128-bit load.
void AssemblerX86::movups(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x10);
  EmitOperand(dst, src);
}

// movups m128, xmm: 0F 11 /r — unaligned 128-bit store.
void AssemblerX86::movups(const Address &dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x11);
  EmitOperand(src, dst);
}
|
| +
|
// addsd xmm, xmm: F2 0F 58 /r — scalar double-precision add.
void AssemblerX86::addsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

// addsd xmm, m64: F2 0F 58 /r.
void AssemblerX86::addsd(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}
|
| +
|
// Packed integer add, element width chosen by Ty:
// paddb = 66 0F FC (i8/i1), paddw = 66 0F FD (i16), paddd = 66 0F FE (i32).
void AssemblerX86::padd(Type Ty, XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  if (Ty == IceType_i8 || Ty == IceType_i1) {
    EmitUint8(0xFC);  // paddb — i1 vectors are treated as byte elements.
  } else if (Ty == IceType_i16) {
    EmitUint8(0xFD);  // paddw
  } else {
    EmitUint8(0xFE);  // paddd
  }
  EmitXmmRegisterOperand(dst, src);
}

// Memory-operand form of padd (same opcode selection by element width).
void AssemblerX86::padd(Type Ty, XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  if (Ty == IceType_i8 || Ty == IceType_i1) {
    EmitUint8(0xFC);  // paddb
  } else if (Ty == IceType_i16) {
    EmitUint8(0xFD);  // paddw
  } else {
    EmitUint8(0xFE);  // paddd
  }
  EmitOperand(dst, src);
}

// Packed integer subtract, element width chosen by Ty:
// psubb = 66 0F F8 (i8/i1), psubw = 66 0F F9 (i16), psubd = 66 0F FA (i32).
void AssemblerX86::psub(Type Ty, XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  if (Ty == IceType_i8 || Ty == IceType_i1) {
    EmitUint8(0xF8);  // psubb
  } else if (Ty == IceType_i16) {
    EmitUint8(0xF9);  // psubw
  } else {
    EmitUint8(0xFA);  // psubd
  }
  EmitXmmRegisterOperand(dst, src);
}

// Memory-operand form of psub (same opcode selection by element width).
void AssemblerX86::psub(Type Ty, XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  if (Ty == IceType_i8 || Ty == IceType_i1) {
    EmitUint8(0xF8);  // psubb
  } else if (Ty == IceType_i16) {
    EmitUint8(0xF9);  // psubw
  } else {
    EmitUint8(0xFA);  // psubd
  }
  EmitOperand(dst, src);
}
|
| +
|
// Packed single-precision arithmetic: 0F <op> /r (no prefix), where
// 58=add, 5C=sub, 5E=div, 59=mul.

// addps xmm, xmm: 0F 58 /r.
void AssemblerX86::addps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

// addps xmm, m128: 0F 58 /r.
void AssemblerX86::addps(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitOperand(dst, src);
}

// subps xmm, xmm: 0F 5C /r.
void AssemblerX86::subps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

// subps xmm, m128: 0F 5C /r.
void AssemblerX86::subps(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}

// divps xmm, xmm: 0F 5E /r.
void AssemblerX86::divps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

// divps xmm, m128: 0F 5E /r.
void AssemblerX86::divps(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}

// mulps xmm, xmm: 0F 59 /r.
void AssemblerX86::mulps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

// mulps xmm, m128: 0F 59 /r.
void AssemblerX86::mulps(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}
|
| +
|
// minps xmm, xmm: 0F 5D /r — packed single-precision minimum.
void AssemblerX86::minps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5D);
  EmitXmmRegisterOperand(dst, src);
}

// maxps xmm, xmm: 0F 5F /r — packed single-precision maximum.
void AssemblerX86::maxps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5F);
  EmitXmmRegisterOperand(dst, src);
}

// andps xmm, xmm: 0F 54 /r — bitwise AND of packed singles.
void AssemblerX86::andps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst, src);
}

// andps xmm, m128: 0F 54 /r.
void AssemblerX86::andps(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}

// orps xmm, xmm: 0F 56 /r — bitwise OR of packed singles.
void AssemblerX86::orps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst, src);
}
|
| +
|
// cmpps xmm, xmm, imm8: 0F C2 /r ib — packed single compare; the trailing
// immediate selects the predicate (eq/lt/le/unord/...).
void AssemblerX86::cmpps(XmmRegister dst, XmmRegister src,
                         uint8_t CmpCondition) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitXmmRegisterOperand(dst, src);
  EmitUint8(CmpCondition);
}

// cmpps xmm, m128, imm8: 0F C2 /r ib.
void AssemblerX86::cmpps(XmmRegister dst, const Address &src,
                         uint8_t CmpCondition) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC2);
  EmitOperand(dst, src);
  EmitUint8(CmpCondition);
}

// sqrtps: 0F 51 /r — in-place (dst used as both source and destination).
void AssemblerX86::sqrtps(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, dst);
}

// rsqrtps: 0F 52 /r — in-place approximate reciprocal square root.
void AssemblerX86::rsqrtps(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x52);
  EmitXmmRegisterOperand(dst, dst);
}

// rcpps: 0F 53 /r — in-place approximate reciprocal.
void AssemblerX86::reciprocalps(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x53);
  EmitXmmRegisterOperand(dst, dst);
}
|
| +
|
// movhlps xmm, xmm: 0F 12 /r — high quadword of src -> low quadword of dst.
void AssemblerX86::movhlps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x12);
  EmitXmmRegisterOperand(dst, src);
}

// movlhps xmm, xmm: 0F 16 /r — low quadword of src -> high quadword of dst.
void AssemblerX86::movlhps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x16);
  EmitXmmRegisterOperand(dst, src);
}

// unpcklps xmm, xmm: 0F 14 /r — interleave low single-precision lanes.
void AssemblerX86::unpcklps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x14);
  EmitXmmRegisterOperand(dst, src);
}

// unpckhps xmm, xmm: 0F 15 /r — interleave high single-precision lanes.
void AssemblerX86::unpckhps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x15);
  EmitXmmRegisterOperand(dst, src);
}

// unpcklpd xmm, xmm: 66 0F 14 /r — interleave low double-precision lanes.
void AssemblerX86::unpcklpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x14);
  EmitXmmRegisterOperand(dst, src);
}

// unpckhpd xmm, xmm: 66 0F 15 /r — interleave high double-precision lanes.
void AssemblerX86::unpckhpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x15);
  EmitXmmRegisterOperand(dst, src);
}
|
| +
|
// Pseudo-instruction: splat a 32-bit immediate across all four lanes of
// dst, clobbering tmp1 as a scratch GPR.
void AssemblerX86::set1ps(XmmRegister dst, Register tmp1,
                          const Immediate &imm) {
  // Load 32-bit immediate value into tmp1.
  movl(tmp1, imm);
  // Move value from tmp1 into dst.
  movd(dst, tmp1);
  // Broadcast low lane into other three lanes.
  shufps(dst, dst, Immediate(0x0));
}

// shufps xmm, xmm, imm8: 0F C6 /r ib — lane shuffle controlled by imm.
void AssemblerX86::shufps(XmmRegister dst, XmmRegister src,
                          const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xC6);
  EmitXmmRegisterOperand(dst, src);
  assert(imm.is_uint8());  // Selector must fit in the one-byte immediate.
  EmitUint8(imm.value());
}
|
| +
|
// Packed double-precision arithmetic: 66 0F <op> /r, where 58=add, 5C=sub,
// 59=mul, 5E=div, 5D=min, 5F=max, 51=sqrt.

// addpd xmm, xmm: 66 0F 58 /r.
void AssemblerX86::addpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x58);
  EmitXmmRegisterOperand(dst, src);
}

// subpd xmm, xmm: 66 0F 5C /r.
void AssemblerX86::subpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

// mulpd xmm, xmm: 66 0F 59 /r.
void AssemblerX86::mulpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

// divpd xmm, xmm: 66 0F 5E /r.
void AssemblerX86::divpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

// minpd xmm, xmm: 66 0F 5D /r.
void AssemblerX86::minpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5D);
  EmitXmmRegisterOperand(dst, src);
}

// maxpd xmm, xmm: 66 0F 5F /r.
void AssemblerX86::maxpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5F);
  EmitXmmRegisterOperand(dst, src);
}

// sqrtpd: 66 0F 51 /r — in-place (dst used as both source and destination).
void AssemblerX86::sqrtpd(XmmRegister dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, dst);
}
|
| +
|
// cvtps2pd xmm, xmm: 0F 5A /r — widen two packed singles to doubles.
void AssemblerX86::cvtps2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

// cvtpd2ps xmm, xmm: 66 0F 5A /r — narrow two packed doubles to singles.
void AssemblerX86::cvtpd2ps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

// shufpd xmm, xmm, imm8: 66 0F C6 /r ib — double-lane shuffle.
void AssemblerX86::shufpd(XmmRegister dst, XmmRegister src,
                          const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xC6);
  EmitXmmRegisterOperand(dst, src);
  assert(imm.is_uint8());  // Selector must fit in the one-byte immediate.
  EmitUint8(imm.value());
}
|
| +
|
// Scalar double-precision arithmetic: F2 0F <op> /r, where 5C=sub,
// 59=mul, 5E=div (addsd is defined above).

// subsd xmm, xmm: F2 0F 5C /r.
void AssemblerX86::subsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitXmmRegisterOperand(dst, src);
}

// subsd xmm, m64: F2 0F 5C /r.
void AssemblerX86::subsd(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5C);
  EmitOperand(dst, src);
}

// mulsd xmm, xmm: F2 0F 59 /r.
void AssemblerX86::mulsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitXmmRegisterOperand(dst, src);
}

// mulsd xmm, m64: F2 0F 59 /r.
void AssemblerX86::mulsd(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x59);
  EmitOperand(dst, src);
}

// divsd xmm, xmm: F2 0F 5E /r.
void AssemblerX86::divsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitXmmRegisterOperand(dst, src);
}

// divsd xmm, m64: F2 0F 5E /r.
void AssemblerX86::divsd(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5E);
  EmitOperand(dst, src);
}
|
| +
|
// Scalar int<->float conversions. Prefix selects the float width
// (F3 = single, F2 = double); opcode selects the operation:
// 2A = int->float, 2D = float->int (rounded), 2C = float->int (truncated),
// 5A = float<->float width change.

// cvtsi2ss xmm, r32: F3 0F 2A /r.
void AssemblerX86::cvtsi2ss(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}

// cvtsi2sd xmm, r32: F2 0F 2A /r.
void AssemblerX86::cvtsi2sd(XmmRegister dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2A);
  EmitOperand(dst, Operand(src));
}

// cvtss2si r32, xmm: F3 0F 2D /r — rounds per MXCSR.
void AssemblerX86::cvtss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}

// cvtss2sd xmm, xmm: F3 0F 5A /r — single -> double.
void AssemblerX86::cvtss2sd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

// cvtsd2si r32, xmm: F2 0F 2D /r — rounds per MXCSR.
void AssemblerX86::cvtsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2D);
  EmitXmmRegisterOperand(dst, src);
}

// cvttss2si r32, xmm: F3 0F 2C /r — truncating conversion.
void AssemblerX86::cvttss2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}

// cvttsd2si r32, xmm: F2 0F 2C /r — truncating conversion.
void AssemblerX86::cvttsd2si(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x2C);
  EmitXmmRegisterOperand(dst, src);
}

// cvtsd2ss xmm, xmm: F2 0F 5A /r — double -> single.
void AssemblerX86::cvtsd2ss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x5A);
  EmitXmmRegisterOperand(dst, src);
}

// cvtdq2pd xmm, xmm: F3 0F E6 /r — two packed int32 -> two doubles.
void AssemblerX86::cvtdq2pd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0xE6);
  EmitXmmRegisterOperand(dst, src);
}
|
| +
|
// ucomiss xmm, xmm: 0F 2E /r — unordered single compare, sets EFLAGS.
void AssemblerX86::ucomiss(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitXmmRegisterOperand(a, b);
}

// ucomiss xmm, m32: 0F 2E /r.
void AssemblerX86::ucomiss(XmmRegister a, const Address &b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitOperand(a, b);
}

// ucomisd xmm, xmm: 66 0F 2E /r — unordered double compare, sets EFLAGS.
void AssemblerX86::ucomisd(XmmRegister a, XmmRegister b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitXmmRegisterOperand(a, b);
}

// ucomisd xmm, m64: 66 0F 2E /r.
void AssemblerX86::ucomisd(XmmRegister a, const Address &b) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x2E);
  EmitOperand(a, b);
}
|
| +
|
// movmskpd r32, xmm: 66 0F 50 /r — extract the two double sign bits.
void AssemblerX86::movmskpd(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x50);
  EmitXmmRegisterOperand(dst, src);
}

// movmskps r32, xmm: 0F 50 /r — extract the four single sign bits.
void AssemblerX86::movmskps(Register dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x50);
  EmitXmmRegisterOperand(dst, src);
}
|
| +
|
// sqrtsd xmm, m64: F2 0F 51 /r — scalar double square root from memory.
void AssemblerX86::sqrtsd(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitOperand(dst, src);
}

// sqrtsd xmm, xmm: F2 0F 51 /r.
void AssemblerX86::sqrtsd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF2);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}

// sqrtss xmm, m32: F3 0F 51 /r — scalar single square root from memory.
void AssemblerX86::sqrtss(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitOperand(dst, src);
}

// sqrtss xmm, xmm: F3 0F 51 /r.
void AssemblerX86::sqrtss(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF3);
  EmitUint8(0x0F);
  EmitUint8(0x51);
  EmitXmmRegisterOperand(dst, src);
}
|
| +
|
// Packed bitwise logic. Double variants carry the 66 prefix; single
// variants do not. Opcodes: 57=xor, 56=or, 54=and.

// xorpd xmm, m128: 66 0F 57 /r.
void AssemblerX86::xorpd(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}

// xorpd xmm, xmm: 66 0F 57 /r.
void AssemblerX86::xorpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}

// orpd xmm, xmm: 66 0F 56 /r.
void AssemblerX86::orpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x56);
  EmitXmmRegisterOperand(dst, src);
}

// xorps xmm, m128: 0F 57 /r.
void AssemblerX86::xorps(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitOperand(dst, src);
}

// xorps xmm, xmm: 0F 57 /r.
void AssemblerX86::xorps(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x57);
  EmitXmmRegisterOperand(dst, src);
}

// andpd xmm, m128: 66 0F 54 /r.
void AssemblerX86::andpd(XmmRegister dst, const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitOperand(dst, src);
}

// andpd xmm, xmm: 66 0F 54 /r.
void AssemblerX86::andpd(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x54);
  EmitXmmRegisterOperand(dst, src);
}
|
| +
|
// PEXTRD r/m32, xmm, imm8 (66 0F 3A 16 /r ib): extract dword `imm` of `src`
// into `dst`. Note the ModRM roles: the xmm register goes in the reg field,
// the GPR in the r/m field, hence EmitOperand(src, Operand(dst)).
// NOTE(review): requires SSE4.1 — presumably guaranteed by the target
// configuration; confirm at the caller.
void AssemblerX86::pextrd(Register dst, XmmRegister src, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x16);
  EmitOperand(src, Operand(dst));
  assert(imm.is_uint8()); // lane selector must fit in the trailing imm8
  EmitUint8(imm.value());
}
|
| +
|
// PMOVSXDQ xmm, xmm (66 0F 38 25 /r): sign-extend the low two dwords of
// `src` into two qwords in `dst`.
void AssemblerX86::pmovsxdq(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x38);
  EmitUint8(0x25);
  EmitXmmRegisterOperand(dst, src);
}

// PCMPEQQ xmm, xmm (66 0F 38 29 /r): packed compare-equal of qwords.
void AssemblerX86::pcmpeqq(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x38);
  EmitUint8(0x29);
  EmitXmmRegisterOperand(dst, src);
}

// PXOR xmm, xmm (66 0F EF /r): bitwise XOR of the full 128-bit registers.
void AssemblerX86::pxor(XmmRegister dst, XmmRegister src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0xEF);
  EmitXmmRegisterOperand(dst, src);
}
|
| +
|
// ROUNDSD xmm, xmm, imm8 (66 0F 3A 0B /r ib): round scalar double using the
// given rounding mode in the low bits of the trailing imm8.
void AssemblerX86::roundsd(XmmRegister dst, XmmRegister src,
                           RoundingMode mode) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x66);
  EmitUint8(0x0F);
  EmitUint8(0x3A);
  EmitUint8(0x0B);
  EmitXmmRegisterOperand(dst, src);
  // Mask precision exception (bit 3 of the immediate).
  EmitUint8(static_cast<uint8_t>(mode) | 0x8);
}
|
| +
|
// FLD m64fp (DD /0): push a 64-bit float onto the x87 stack.
void AssemblerX86::fldl(const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(0, src);
}

// FSTP m64fp (DD /3): store ST(0) as a 64-bit float and pop.
void AssemblerX86::fstpl(const Address &dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDD);
  EmitOperand(3, dst);
}

// FNSTCW m16 (D9 /7): store the x87 control word (no pending-exception check).
void AssemblerX86::fnstcw(const Address &dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(7, dst);
}

// FLDCW m16 (D9 /5): load the x87 control word.
void AssemblerX86::fldcw(const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitOperand(5, src);
}

// FISTP m64int (DF /7): store ST(0) as a 64-bit integer and pop.
void AssemblerX86::fistpl(const Address &dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(7, dst);
}

// FISTP m32int (DB /3): store ST(0) as a 32-bit integer and pop.
void AssemblerX86::fistps(const Address &dst) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(3, dst);
}

// FILD m64int (DF /5): push a 64-bit integer onto the x87 stack.
void AssemblerX86::fildl(const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDF);
  EmitOperand(5, src);
}

// FILD m32int (DB /0): push a 32-bit integer onto the x87 stack.
void AssemblerX86::filds(const Address &src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xDB);
  EmitOperand(0, src);
}

// FINCSTP (D9 F7): increment the x87 stack-top pointer.
void AssemblerX86::fincstp() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xD9);
  EmitUint8(0xF7);
}
|
| +
|
// XCHG r32, r32 (87 /r): exchange the two registers.
void AssemblerX86::xchgl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x87);
  EmitRegisterOperand(dst, src);
}
|
| +
|
| +void AssemblerX86::cmp(Type Ty, Register reg, const Immediate &imm) {
|
| + AssemblerBuffer::EnsureCapacity ensured(&buffer_);
|
| + if (typeWidthInBytes(Ty) == 1) {
|
| + EmitComplexI8(7, Operand(reg), imm);
|
| + return;
|
| + }
|
| + if (Ty == IceType_i16)
|
| + EmitOperandSizeOverride();
|
| + EmitComplex(7, Operand(reg), imm);
|
| +}
|
| +
|
| +void AssemblerX86::cmp(Type Ty, Register reg0, Register reg1) {
|
| + AssemblerBuffer::EnsureCapacity ensured(&buffer_);
|
| + if (typeWidthInBytes(Ty) == 1) {
|
| + EmitUint8(0x3A);
|
| + } else if (Ty == IceType_i16) {
|
| + EmitOperandSizeOverride();
|
| + EmitUint8(0x3B);
|
| + } else {
|
| + EmitUint8(0x3B);
|
| + }
|
| + EmitRegisterOperand(reg0, reg1);
|
| +}
|
| +
|
| +void AssemblerX86::cmp(Type Ty, Register reg, const Address &address) {
|
| + AssemblerBuffer::EnsureCapacity ensured(&buffer_);
|
| + if (typeWidthInBytes(Ty) == 1) {
|
| + EmitUint8(0x3A);
|
| + } else if (Ty == IceType_i16) {
|
| + EmitOperandSizeOverride();
|
| + EmitUint8(0x3B);
|
| + } else {
|
| + EmitUint8(0x3B);
|
| + }
|
| + EmitOperand(reg, address);
|
| +}
|
| +
|
| +void AssemblerX86::cmp(Type Ty, const Address &address, Register reg) {
|
| + AssemblerBuffer::EnsureCapacity ensured(&buffer_);
|
| + if (typeWidthInBytes(Ty) == 1) {
|
| + EmitUint8(0x38);
|
| + } else if (Ty == IceType_i16) {
|
| + EmitOperandSizeOverride();
|
| + EmitUint8(0x39);
|
| + } else {
|
| + EmitUint8(0x39);
|
| + }
|
| + EmitOperand(reg, address);
|
| +}
|
| +
|
| +void AssemblerX86::cmp(Type Ty, const Address &address, const Immediate &imm) {
|
| + AssemblerBuffer::EnsureCapacity ensured(&buffer_);
|
| + if (typeWidthInBytes(Ty) == 1) {
|
| + EmitComplexI8(7, address, imm);
|
| + return;
|
| + }
|
| + if (Ty == IceType_i16)
|
| + EmitOperandSizeOverride();
|
| + EmitComplex(7, address, imm);
|
| +}
|
| +
|
// TEST r32, r32 (85 /r): AND the operands and set flags, discarding the
// result.
void AssemblerX86::testl(Register reg1, Register reg2) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x85);
  EmitRegisterOperand(reg1, reg2);
}

// TEST reg, imm. Picks the shortest encoding available:
//   A8 ib        TEST AL, imm8
//   F6 /0 ib     TEST r8, imm8   (EAX..EBX only, i.e. reg < 4)
//   A9 id        TEST EAX, imm32
//   F7 /0 id     TEST r/m32, imm32
void AssemblerX86::testl(Register reg, const Immediate &immediate) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // For registers that have a byte variant (EAX, EBX, ECX, and EDX)
  // we only test the byte register to keep the encoding short.
  if (immediate.is_uint8() && reg < 4) {
    // Use zero-extended 8-bit immediate.
    if (reg == EAX) {
      EmitUint8(0xA8);
    } else {
      EmitUint8(0xF6);
      EmitUint8(0xC0 + reg); // ModRM: mod=11, /0, rm=reg
    }
    EmitUint8(immediate.value() & 0xFF);
  } else if (reg == EAX) {
    // Use short form if the destination is EAX.
    EmitUint8(0xA9);
    EmitImmediate(immediate);
  } else {
    EmitUint8(0xF7);
    EmitOperand(0, Operand(reg));
    EmitImmediate(immediate);
  }
}
|
| +
|
// AND reg, reg: 0x22 is the byte form, 0x23 the word/dword form (16-bit
// operands add the 0x66 operand-size override first).
void AssemblerX86::_and(Type Ty, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x22);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x23);
  } else {
    EmitUint8(0x23);
  }
  EmitRegisterOperand(dst, src);
}

// AND reg, imm: group-1 immediate form, /4 selects AND.
void AssemblerX86::_and(Type Ty, Register reg, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitComplexI8(4, Operand(reg), imm);
    return;
  }
  if (Ty == IceType_i16)
    EmitOperandSizeOverride();
  EmitComplex(4, Operand(reg), imm);
}

// AND reg, mem: same opcodes as the reg-reg form with a memory r/m operand.
void AssemblerX86::_and(Type Ty, Register dst, const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x22);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x23);
  } else {
    EmitUint8(0x23);
  }
  EmitOperand(dst, address);
}

// OR reg, reg: 0x0A byte form, 0x0B word/dword form.
void AssemblerX86::_or(Type Ty, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x0A);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x0B);
  } else {
    EmitUint8(0x0B);
  }
  EmitRegisterOperand(dst, src);
}

// OR reg, imm: group-1 immediate form, /1 selects OR.
void AssemblerX86::_or(Type Ty, Register reg, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitComplexI8(1, Operand(reg), imm);
    return;
  }
  if (Ty == IceType_i16)
    EmitOperandSizeOverride();
  EmitComplex(1, Operand(reg), imm);
}

// OR reg, mem.
void AssemblerX86::_or(Type Ty, Register dst, const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x0A);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x0B);
  } else {
    EmitUint8(0x0B);
  }
  EmitOperand(dst, address);
}

// XOR reg, reg: 0x32 byte form, 0x33 word/dword form.
void AssemblerX86::_xor(Type Ty, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x32);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x33);
  } else {
    EmitUint8(0x33);
  }
  EmitRegisterOperand(dst, src);
}

// XOR reg, imm: group-1 immediate form, /6 selects XOR.
void AssemblerX86::_xor(Type Ty, Register reg, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitComplexI8(6, Operand(reg), imm);
    return;
  }
  if (Ty == IceType_i16)
    EmitOperandSizeOverride();
  EmitComplex(6, Operand(reg), imm);
}

// XOR reg, mem.
void AssemblerX86::_xor(Type Ty, Register dst, const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x32);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x33);
  } else {
    EmitUint8(0x33);
  }
  EmitOperand(dst, address);
}
|
| +
|
// ADD reg, imm: group-1 immediate form, /0 selects ADD.
void AssemblerX86::add(Type Ty, Register reg, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitComplexI8(0, Operand(reg), imm);
    return;
  }
  if (Ty == IceType_i16)
    EmitOperandSizeOverride();
  EmitComplex(0, Operand(reg), imm);
}

// ADD mem, reg: direction-flipped opcodes 0x00 (byte) / 0x01 (word/dword);
// memory is the destination.
void AssemblerX86::add(Type Ty, const Address &address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x00);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x01);
  } else {
    EmitUint8(0x01);
  }
  EmitOperand(reg, address);
}

// ADD mem, imm.
void AssemblerX86::add(Type Ty, const Address &address, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitComplexI8(0, address, imm);
    return;
  }
  if (Ty == IceType_i16)
    EmitOperandSizeOverride();
  EmitComplex(0, address, imm);
}

// TODO(jvoung): In these cases, Reg-Reg and Reg-Mem are very similar.
// Instead of using EmitRegisterOperand() it is possible to use
// EmitOperand(dst, Operand(src)), and then they would be identical.
// Then the only outlier is Reg-Imm.

// ADD reg, reg: 0x02 byte form, 0x03 word/dword form.
void AssemblerX86::add(Type Ty, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x02);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x03);
  } else {
    EmitUint8(0x03);
  }
  EmitRegisterOperand(dst, src);
}

// ADD reg, mem.
void AssemblerX86::add(Type Ty, Register reg, const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x02);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x03);
  } else {
    EmitUint8(0x03);
  }
  EmitOperand(reg, address);
}
|
| +
|
// ADC reg, imm (add with carry): group-1 immediate form, /2 selects ADC.
void AssemblerX86::adc(Type Ty, Register reg, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitComplexI8(2, Operand(reg), imm);
    return;
  }
  if (Ty == IceType_i16)
    EmitOperandSizeOverride();
  EmitComplex(2, Operand(reg), imm);
}

// ADC reg, reg: 0x12 byte form, 0x13 word/dword form.
void AssemblerX86::adc(Type Ty, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x12);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x13);
  } else {
    EmitUint8(0x13);
  }
  EmitRegisterOperand(dst, src);
}

// ADC reg, mem.
void AssemblerX86::adc(Type Ty, Register dst, const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x12);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x13);
  } else {
    EmitUint8(0x13);
  }
  EmitOperand(dst, address);
}
|
| +
|
// SUB reg, reg: 0x2A byte form, 0x2B word/dword form.
void AssemblerX86::sub(Type Ty, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x2A);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x2B);
  } else {
    EmitUint8(0x2B);
  }
  EmitRegisterOperand(dst, src);
}

// SUB reg, imm: group-1 immediate form, /5 selects SUB.
void AssemblerX86::sub(Type Ty, Register reg, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitComplexI8(5, Operand(reg), imm);
    return;
  }
  if (Ty == IceType_i16)
    EmitOperandSizeOverride();
  EmitComplex(5, Operand(reg), imm);
}

// SUB reg, mem.
void AssemblerX86::sub(Type Ty, Register reg, const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x2A);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x2B);
  } else {
    EmitUint8(0x2B);
  }
  EmitOperand(reg, address);
}
|
| +
|
// CBW (66 98): sign-extend AL into AX. The 0x66 prefix selects the 16-bit
// form of the 0x98 opcode (which would otherwise be CWDE).
void AssemblerX86::cbw() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x98);
}

// CWD (66 99): sign-extend AX into DX:AX.
void AssemblerX86::cwd() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitOperandSizeOverride();
  EmitUint8(0x99);
}

// CDQ (99): sign-extend EAX into EDX:EAX.
void AssemblerX86::cdq() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x99);
}
|
| +
|
// IDIV r32 (F7 /7): signed divide EDX:EAX by reg. 0xF8|reg is the ModRM byte
// with mod=11 and reg field 7.
void AssemblerX86::idivl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xF8 | reg);
}

// IMUL r32, r32 (0F AF /r): two-operand signed multiply.
void AssemblerX86::imull(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(dst, Operand(src));
}

// IMUL r32, r/m32, imm32 (69 /r id), with source and destination both `reg`.
void AssemblerX86::imull(Register reg, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x69);
  EmitOperand(reg, Operand(reg));
  EmitImmediate(imm);
}

// IMUL r32, m32 (0F AF /r).
void AssemblerX86::imull(Register reg, const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAF);
  EmitOperand(reg, address);
}

// IMUL r32 (F7 /5): one-operand form, EDX:EAX = EAX * reg.
void AssemblerX86::imull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, Operand(reg));
}

// IMUL m32 (F7 /5), memory operand.
void AssemblerX86::imull(const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(5, address);
}

// MUL r32 (F7 /4): unsigned one-operand multiply, EDX:EAX = EAX * reg.
void AssemblerX86::mull(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, Operand(reg));
}

// MUL m32 (F7 /4), memory operand.
void AssemblerX86::mull(const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(4, address);
}
|
| +
|
// SBB reg, reg (subtract with borrow): 0x1A byte form, 0x1B word/dword form.
void AssemblerX86::sbb(Type Ty, Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x1A);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x1B);
  } else {
    EmitUint8(0x1B);
  }
  EmitRegisterOperand(dst, src);
}

// SBB reg, imm: group-1 immediate form, /3 selects SBB.
void AssemblerX86::sbb(Type Ty, Register reg, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitComplexI8(3, Operand(reg), imm);
    return;
  }
  if (Ty == IceType_i16)
    EmitOperandSizeOverride();
  EmitComplex(3, Operand(reg), imm);
}

// SBB reg, mem.
void AssemblerX86::sbb(Type Ty, Register dst, const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (typeWidthInBytes(Ty) == 1) {
    EmitUint8(0x1A);
  } else if (Ty == IceType_i16) {
    EmitOperandSizeOverride();
    EmitUint8(0x1B);
  } else {
    EmitUint8(0x1B);
  }
  EmitOperand(dst, address);
}
|
| +
|
// INC r32: single-byte form 0x40+reg (32-bit mode only).
void AssemblerX86::incl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x40 + reg);
}

// INC m32 (FF /0).
void AssemblerX86::incl(const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(0, address);
}

// DEC r32: single-byte form 0x48+reg (32-bit mode only).
void AssemblerX86::decl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x48 + reg);
}

// DEC m32 (FF /1).
void AssemblerX86::decl(const Address &address) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitOperand(1, address);
}
|
| +
|
// Shift helpers: the group-2 shift opcodes select the operation via the
// ModRM reg field — /4 = SHL, /5 = SHR, /7 = SAR. Register-count variants
// require the count in CL (asserted in EmitGenericShift).

// SHL r32, imm8.
void AssemblerX86::shll(Register reg, const Immediate &imm) {
  EmitGenericShift(4, reg, imm);
}

// SHL r32, CL.
void AssemblerX86::shll(Register operand, Register shifter) {
  EmitGenericShift(4, Operand(operand), shifter);
}

// SHL m32, CL.
void AssemblerX86::shll(const Address &operand, Register shifter) {
  EmitGenericShift(4, Operand(operand), shifter);
}

// SHR r32, imm8.
void AssemblerX86::shrl(Register reg, const Immediate &imm) {
  EmitGenericShift(5, reg, imm);
}

// SHR r32, CL.
void AssemblerX86::shrl(Register operand, Register shifter) {
  EmitGenericShift(5, Operand(operand), shifter);
}

// SAR r32, imm8.
void AssemblerX86::sarl(Register reg, const Immediate &imm) {
  EmitGenericShift(7, reg, imm);
}

// SAR r32, CL.
void AssemblerX86::sarl(Register operand, Register shifter) {
  EmitGenericShift(7, Operand(operand), shifter);
}

// SAR m32, CL.
void AssemblerX86::sarl(const Address &address, Register shifter) {
  EmitGenericShift(7, Operand(address), shifter);
}
|
| +
|
// SHLD r/m32, r32, CL (0F A5 /r): double-precision left shift. The ModRM
// reg field holds the *source* register, hence EmitRegisterOperand(src, dst).
void AssemblerX86::shld(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA5);
  EmitRegisterOperand(src, dst);
}

// SHLD r/m32, r32, imm8 (0F A4 /r ib).
void AssemblerX86::shld(Register dst, Register src, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  assert(imm.is_int8()); // shift count is a trailing imm8
  EmitUint8(0x0F);
  EmitUint8(0xA4);
  EmitRegisterOperand(src, dst);
  EmitUint8(imm.value() & 0xFF);
}

// SHLD m32, r32, CL (0F A5 /r), memory destination.
void AssemblerX86::shld(const Address &operand, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA5);
  EmitOperand(src, Operand(operand));
}

// SHRD r/m32, r32, CL (0F AD /r): double-precision right shift.
void AssemblerX86::shrd(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAD);
  EmitRegisterOperand(src, dst);
}

// SHRD r/m32, r32, imm8 (0F AC /r ib).
void AssemblerX86::shrd(Register dst, Register src, const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  assert(imm.is_int8());
  EmitUint8(0x0F);
  EmitUint8(0xAC);
  EmitRegisterOperand(src, dst);
  EmitUint8(imm.value() & 0xFF);
}

// SHRD m32, r32, CL (0F AD /r), memory destination.
void AssemblerX86::shrd(const Address &dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xAD);
  EmitOperand(src, Operand(dst));
}
|
| +
|
// NEG r32 (F7 /3): two's-complement negation.
void AssemblerX86::negl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitOperand(3, Operand(reg));
}

// NOT r32 (F7 /2): one's-complement negation. 0xD0|reg is the ModRM byte
// with mod=11 and reg field 2.
void AssemblerX86::notl(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF7);
  EmitUint8(0xD0 | reg);
}

// BSR r32, r32 (0F BD /r): bit-scan reverse (index of highest set bit).
void AssemblerX86::bsrl(Register dst, Register src) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xBD);
  EmitRegisterOperand(dst, src);
}

// BT r/m32, r32 (0F A3 /r): bit test. The bit offset register goes in the
// ModRM reg field, the base in r/m.
void AssemblerX86::bt(Register base, Register offset) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xA3);
  EmitRegisterOperand(offset, base);
}
|
| +
|
// ENTER imm16, 0 (C8 iw ib): make a stack frame of `imm` bytes; the trailing
// 0x00 is the nesting-level byte.
void AssemblerX86::enter(const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC8);
  assert(imm.is_uint16()); // frame size is encoded as a little-endian imm16
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
  EmitUint8(0x00);
}

// LEAVE (C9): tear down the current stack frame.
void AssemblerX86::leave() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC9);
}

// RET (C3): near return.
void AssemblerX86::ret() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC3);
}

// RET imm16 (C2 iw): near return, popping `imm` extra bytes of arguments.
void AssemblerX86::ret(const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xC2);
  assert(imm.is_uint16());
  EmitUint8(imm.value() & 0xFF);
  EmitUint8((imm.value() >> 8) & 0xFF);
}
|
| +
|
// Emits a single multi-byte NOP of exactly `size` bytes (1..8), used for
// padding/alignment. The encodings are the recommended 0F 1F multi-byte NOP
// forms with increasingly wide ModRM/displacement fields.
void AssemblerX86::nop(int size) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  // There are nops up to size 15, but for now just provide up to size 8.
  assert(0 < size && size <= MAX_NOP_SIZE);
  switch (size) {
  case 1:
    EmitUint8(0x90); // classic NOP
    break;
  case 2:
    EmitUint8(0x66); // 66 NOP
    EmitUint8(0x90);
    break;
  case 3:
    EmitUint8(0x0F); // NOP DWORD ptr [EAX]
    EmitUint8(0x1F);
    EmitUint8(0x00);
    break;
  case 4:
    EmitUint8(0x0F); // NOP DWORD ptr [EAX + disp8]
    EmitUint8(0x1F);
    EmitUint8(0x40);
    EmitUint8(0x00);
    break;
  case 5:
    EmitUint8(0x0F); // NOP DWORD ptr [EAX + EAX*1 + disp8]
    EmitUint8(0x1F);
    EmitUint8(0x44);
    EmitUint8(0x00);
    EmitUint8(0x00);
    break;
  case 6:
    EmitUint8(0x66); // 66-prefixed 5-byte form
    EmitUint8(0x0F);
    EmitUint8(0x1F);
    EmitUint8(0x44);
    EmitUint8(0x00);
    EmitUint8(0x00);
    break;
  case 7:
    EmitUint8(0x0F); // NOP DWORD ptr [EAX + disp32]
    EmitUint8(0x1F);
    EmitUint8(0x80);
    EmitUint8(0x00);
    EmitUint8(0x00);
    EmitUint8(0x00);
    EmitUint8(0x00);
    break;
  case 8:
    EmitUint8(0x0F); // NOP DWORD ptr [EAX + EAX*1 + disp32]
    EmitUint8(0x1F);
    EmitUint8(0x84);
    EmitUint8(0x00);
    EmitUint8(0x00);
    EmitUint8(0x00);
    EmitUint8(0x00);
    EmitUint8(0x00);
    break;
  default:
    // Sizes 9..MAX_NOP_SIZE (if MAX_NOP_SIZE > 8) are not encoded yet.
    llvm_unreachable("UNIMPLEMENTED");
  }
}
|
| +
|
// INT3 (CC): breakpoint trap.
void AssemblerX86::int3() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xCC);
}

// HLT (F4): halt the processor.
void AssemblerX86::hlt() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF4);
}
|
| +
|
// Conditional jump to a Label. For a bound (backward) label the rel8/rel32
// displacement is computed immediately, preferring the 2-byte short form
// when it fits; for an unbound label a link is recorded and patched in
// Bind(). `near` forces the short form for unbound labels.
void AssemblerX86::j(Condition condition, Label *label, bool near) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2; // 70+cc rel8
    static const int kLongSize = 6;  // 0F 80+cc rel32
    intptr_t offset = label->Position() - buffer_.Size();
    assert(offset <= 0); // bound labels are always behind us
    if (Utils::IsInt(8, offset - kShortSize)) {
      EmitUint8(0x70 + condition);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0x0F);
      EmitUint8(0x80 + condition);
      EmitInt32(offset - kLongSize);
    }
  } else if (near) {
    EmitUint8(0x70 + condition);
    EmitNearLabelLink(label);
  } else {
    EmitUint8(0x0F);
    EmitUint8(0x80 + condition);
    EmitLabelLink(label);
  }
}

// Conditional jump to an external symbol: always the rel32 form, with a
// pc-relative fixup. The -4 placeholder compensates for the relocation
// being relative to the end of the 4-byte field.
void AssemblerX86::j(Condition condition, const ConstantRelocatable *label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0x80 + condition);
  EmitFixup(DirectCallRelocation::create(this, FK_PcRel_4, label));
  EmitInt32(-4);
}
|
| +
|
// JMP r32 (FF /4): register-indirect jump.
void AssemblerX86::jmp(Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xFF);
  EmitRegisterOperand(4, reg);
}

// Unconditional jump to a Label; same bound/unbound/near handling as the
// conditional j() above, with EB rel8 / E9 rel32 encodings.
void AssemblerX86::jmp(Label *label, bool near) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  if (label->IsBound()) {
    static const int kShortSize = 2; // EB rel8
    static const int kLongSize = 5;  // E9 rel32
    intptr_t offset = label->Position() - buffer_.Size();
    assert(offset <= 0);
    if (Utils::IsInt(8, offset - kShortSize)) {
      EmitUint8(0xEB);
      EmitUint8((offset - kShortSize) & 0xFF);
    } else {
      EmitUint8(0xE9);
      EmitInt32(offset - kLongSize);
    }
  } else if (near) {
    EmitUint8(0xEB);
    EmitNearLabelLink(label);
  } else {
    EmitUint8(0xE9);
    EmitLabelLink(label);
  }
}

// Unconditional jump to an external symbol: E9 rel32 with a pc-relative
// fixup; -4 placeholder as in j(Condition, ConstantRelocatable*).
void AssemblerX86::jmp(const ConstantRelocatable *label) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xE9);
  EmitFixup(DirectCallRelocation::create(this, FK_PcRel_4, label));
  EmitInt32(-4);
}
|
| +
|
// LOCK prefix (F0): makes the following instruction atomic. The caller is
// responsible for emitting a lockable instruction immediately after.
void AssemblerX86::lock() {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0xF0);
}

// CMPXCHG m32, r32 (0F B1 /r): compare EAX with memory, conditionally store
// reg. Typically preceded by lock() for atomicity.
void AssemblerX86::cmpxchgl(const Address &address, Register reg) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  EmitUint8(0x0F);
  EmitUint8(0xB1);
  EmitOperand(reg, address);
}
|
| +
|
// Pads with NOPs until (offset + current position) is a multiple of
// `alignment` (which must be a power of two). `offset` lets the caller
// account for a known displacement between buffer position and the final
// address.
void AssemblerX86::Align(intptr_t alignment, intptr_t offset) {
  assert(llvm::isPowerOf2_32(alignment));
  intptr_t pos = offset + buffer_.GetPosition();
  intptr_t mod = pos & (alignment - 1);
  if (mod == 0) {
    return; // already aligned
  }
  intptr_t bytes_needed = alignment - mod;
  // Emit maximal-width NOPs first, then one final NOP for the remainder.
  while (bytes_needed > MAX_NOP_SIZE) {
    nop(MAX_NOP_SIZE);
    bytes_needed -= MAX_NOP_SIZE;
  }
  if (bytes_needed) {
    nop(bytes_needed);
  }
  assert(((offset + buffer_.GetPosition()) & (alignment - 1)) == 0);
}
|
| +
|
// Binds `label` to the current buffer position and back-patches every
// previously emitted forward reference to it: 32-bit links store the next
// link position in their displacement field (a linked list threaded through
// the code), near links are patched as rel8.
void AssemblerX86::Bind(Label *label) {
  intptr_t bound = buffer_.Size();
  assert(!label->IsBound()); // Labels can only be bound once.
  while (label->IsLinked()) {
    intptr_t position = label->LinkPosition();
    // The stored int32 is the position of the next link in the chain.
    intptr_t next = buffer_.Load<int32_t>(position);
    // Patch in the real rel32 displacement (relative to the end of the
    // 4-byte field).
    buffer_.Store<int32_t>(position, bound - (position + 4));
    label->position_ = next;
  }
  while (label->HasNear()) {
    intptr_t position = label->NearPosition();
    intptr_t offset = bound - (position + 1); // rel8 relative to next byte
    assert(Utils::IsInt(8, offset));
    buffer_.Store<int8_t>(position, offset);
  }
  label->BindTo(bound);
}
|
| +
|
// Emits a pre-encoded ModRM/SIB/displacement operand, inserting `rm` (the
// opcode extension or register number) into the reg field of the ModRM byte.
void AssemblerX86::EmitOperand(int rm, const Operand &operand) {
  assert(rm >= 0 && rm < 8); // reg field is 3 bits
  const intptr_t length = operand.length_;
  assert(length > 0);
  // Emit the ModRM byte updated with the given RM value.
  assert((operand.encoding_[0] & 0x38) == 0); // reg field must be vacant
  EmitUint8(operand.encoding_[0] + (rm << 3));
  // A fixup (e.g. for a relocatable displacement) applies to the bytes that
  // follow the ModRM byte.
  if (operand.fixup()) {
    EmitFixup(operand.fixup());
  }
  // Emit the rest of the encoded operand.
  for (intptr_t i = 1; i < length; i++) {
    EmitUint8(operand.encoding_[i]);
  }
}
|
| +
|
// Emits `imm` as a little-endian 32-bit immediate.
void AssemblerX86::EmitImmediate(const Immediate &imm) {
  EmitInt32(imm.value());
}
|
| +
|
// Emits a group-1 ALU op (ADD/OR/ADC/SBB/AND/SUB/XOR/CMP selected by `rm`)
// with an 8-bit destination and imm8. Uses the one-byte-shorter AL form
// (opcode 0x04 + rm*8) when the operand is EAX/AL.
void AssemblerX86::EmitComplexI8(int rm, const Operand &operand,
                                 const Immediate &immediate) {
  assert(rm >= 0 && rm < 8);
  assert(immediate.is_int8());
  if (operand.IsRegister(EAX)) {
    // Use short form if the destination is al.
    EmitUint8(0x04 + (rm << 3));
    EmitUint8(immediate.value() & 0xFF);
  } else {
    // Use sign-extended 8-bit immediate.
    EmitUint8(0x80); // group-1 r/m8, imm8
    EmitOperand(rm, operand);
    EmitUint8(immediate.value() & 0xFF);
  }
}

// Emits a group-1 ALU op with a 16/32-bit destination, choosing the
// shortest of: 0x83 (sign-extended imm8), 0x05+rm*8 (EAX short form with
// imm32), or 0x81 (general r/m with imm32).
void AssemblerX86::EmitComplex(int rm, const Operand &operand,
                               const Immediate &immediate) {
  assert(rm >= 0 && rm < 8);
  if (immediate.is_int8()) {
    // Use sign-extended 8-bit immediate.
    EmitUint8(0x83);
    EmitOperand(rm, operand);
    EmitUint8(immediate.value() & 0xFF);
  } else if (operand.IsRegister(EAX)) {
    // Use short form if the destination is eax.
    EmitUint8(0x05 + (rm << 3));
    EmitImmediate(immediate);
  } else {
    EmitUint8(0x81);
    EmitOperand(rm, operand);
    EmitImmediate(immediate);
  }
}
|
| +
|
// Emits the rel32 displacement field for a jump/call to `label`: an actual
// backward displacement if the label is bound, otherwise a forward link.
// `instruction_size` is the total size of the instruction being emitted,
// needed because the displacement is relative to the *end* of it.
void AssemblerX86::EmitLabel(Label *label, intptr_t instruction_size) {
  if (label->IsBound()) {
    intptr_t offset = label->Position() - buffer_.Size();
    assert(offset <= 0);
    EmitInt32(offset - instruction_size);
  } else {
    EmitLabelLink(label);
  }
}

// Records a 32-bit forward reference: the emitted int32 holds the previous
// link position, forming a chain that Bind() later walks and patches.
void AssemblerX86::EmitLabelLink(Label *label) {
  assert(!label->IsBound());
  intptr_t position = buffer_.Size();
  EmitInt32(label->position_);
  label->LinkTo(position);
}

// Records an 8-bit forward reference for a near jump; the placeholder byte
// is patched in Bind().
void AssemblerX86::EmitNearLabelLink(Label *label) {
  assert(!label->IsBound());
  intptr_t position = buffer_.Size();
  EmitUint8(0);
  label->NearLinkTo(position);
}
|
| +
|
// Emits a group-2 shift of `reg` by an immediate count. `rm` selects the
// operation (/4 SHL, /5 SHR, /7 SAR). Uses the dedicated shift-by-1 opcode
// 0xD1 when the count is 1, otherwise 0xC1 with a trailing imm8.
void AssemblerX86::EmitGenericShift(int rm, Register reg,
                                    const Immediate &imm) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  assert(imm.is_int8());
  if (imm.value() == 1) {
    EmitUint8(0xD1);
    EmitOperand(rm, Operand(reg));
  } else {
    EmitUint8(0xC1);
    EmitOperand(rm, Operand(reg));
    EmitUint8(imm.value() & 0xFF);
  }
}

// Emits a group-2 shift of `operand` by CL (0xD3). The hardware takes the
// count implicitly in CL, hence the assert.
void AssemblerX86::EmitGenericShift(int rm, const Operand &operand,
                                    Register shifter) {
  AssemblerBuffer::EnsureCapacity ensured(&buffer_);
  assert(shifter == ECX);
  EmitUint8(0xD3);
  EmitOperand(rm, Operand(operand));
}
|
| +
|
| +} // end of namespace x86
|
| +} // end of namespace Ice
|
|
|