Index: src/arm64/assembler-arm64-inl.h
diff --git a/src/arm64/assembler-arm64-inl.h b/src/arm64/assembler-arm64-inl.h
index d11e71c87f0127e48715b1441e5bd8d57d7c9a5d..fd784b4b1161fcbcc9e9dae5751638bd79785ab9 100644
--- a/src/arm64/assembler-arm64-inl.h
+++ b/src/arm64/assembler-arm64-inl.h
@@ -57,6 +57,15 @@ inline int CPURegister::SizeInBytes() const {
  return reg_size / 8;
}
+inline bool CPURegister::Is8Bits() const {
+  DCHECK(IsValid());
+  return reg_size == 8;
+}
+
+inline bool CPURegister::Is16Bits() const {
+  DCHECK(IsValid());
+  return reg_size == 16;
+}
inline bool CPURegister::Is32Bits() const {
  DCHECK(IsValid());
@@ -69,9 +78,13 @@ inline bool CPURegister::Is64Bits() const {
  return reg_size == 64;
}
+inline bool CPURegister::Is128Bits() const {
+  DCHECK(IsValid());
+  return reg_size == 128;
+}
inline bool CPURegister::IsValid() const {
-  if (IsValidRegister() || IsValidFPRegister()) {
+  if (IsValidRegister() || IsValidVRegister()) {
    DCHECK(!IsNone());
    return true;
  } else {
@@ -87,14 +100,14 @@ inline bool CPURegister::IsValidRegister() const {
         ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode));
}
-
-inline bool CPURegister::IsValidFPRegister() const {
-  return IsFPRegister() &&
-         ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) &&
-         (reg_code < kNumberOfFPRegisters);
+inline bool CPURegister::IsValidVRegister() const {
+  return IsVRegister() &&
+         ((reg_size == kBRegSizeInBits) || (reg_size == kHRegSizeInBits) ||
+          (reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits) ||
+          (reg_size == kQRegSizeInBits)) &&
+         (reg_code < kNumberOfVRegisters);
}
-
inline bool CPURegister::IsNone() const {
  // kNoRegister types should always have size 0 and code 0.
  DCHECK((reg_type != kNoRegister) || (reg_code == 0));
@@ -120,11 +133,7 @@ inline bool CPURegister::IsRegister() const {
  return reg_type == kRegister;
}
-
-inline bool CPURegister::IsFPRegister() const {
-  return reg_type == kFPRegister;
-}
-
+inline bool CPURegister::IsVRegister() const { return reg_type == kVRegister; }
inline bool CPURegister::IsSameSizeAndType(const CPURegister& other) const {
  return (reg_size == other.reg_size) && (reg_type == other.reg_type);
@@ -200,7 +209,7 @@ inline Register Register::XRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return csp;
  } else {
-    DCHECK(code < kNumberOfRegisters);
+    DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters));
    return Register::Create(code, kXRegSizeInBits);
  }
}
@@ -210,23 +219,40 @@ inline Register Register::WRegFromCode(unsigned code) {
  if (code == kSPRegInternalCode) {
    return wcsp;
  } else {
-    DCHECK(code < kNumberOfRegisters);
+    DCHECK_LT(code, static_cast<unsigned>(kNumberOfRegisters));
    return Register::Create(code, kWRegSizeInBits);
  }
}
+inline VRegister VRegister::BRegFromCode(unsigned code) {
+  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
+  return VRegister::Create(code, kBRegSizeInBits);
+}
+
+inline VRegister VRegister::HRegFromCode(unsigned code) {
+  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
+  return VRegister::Create(code, kHRegSizeInBits);
+}
-inline FPRegister FPRegister::SRegFromCode(unsigned code) {
-  DCHECK(code < kNumberOfFPRegisters);
-  return FPRegister::Create(code, kSRegSizeInBits);
+inline VRegister VRegister::SRegFromCode(unsigned code) {
+  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
+  return VRegister::Create(code, kSRegSizeInBits);
}
+inline VRegister VRegister::DRegFromCode(unsigned code) {
+  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
+  return VRegister::Create(code, kDRegSizeInBits);
+}
-inline FPRegister FPRegister::DRegFromCode(unsigned code) {
-  DCHECK(code < kNumberOfFPRegisters);
-  return FPRegister::Create(code, kDRegSizeInBits);
+inline VRegister VRegister::QRegFromCode(unsigned code) {
+  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
+  return VRegister::Create(code, kQRegSizeInBits);
}
+inline VRegister VRegister::VRegFromCode(unsigned code) {
+  DCHECK_LT(code, static_cast<unsigned>(kNumberOfVRegisters));
+  return VRegister::Create(code, kVRegSizeInBits);
+}
inline Register CPURegister::W() const {
  DCHECK(IsValidRegister());
@@ -239,16 +265,34 @@ inline Register CPURegister::X() const {
  return Register::XRegFromCode(reg_code);
}
+inline VRegister CPURegister::V() const {
+  DCHECK(IsValidVRegister());
+  return VRegister::VRegFromCode(reg_code);
+}
+
+inline VRegister CPURegister::B() const {
+  DCHECK(IsValidVRegister());
+  return VRegister::BRegFromCode(reg_code);
+}
+
+inline VRegister CPURegister::H() const {
+  DCHECK(IsValidVRegister());
+  return VRegister::HRegFromCode(reg_code);
+}
-inline FPRegister CPURegister::S() const {
-  DCHECK(IsValidFPRegister());
-  return FPRegister::SRegFromCode(reg_code);
+inline VRegister CPURegister::S() const {
+  DCHECK(IsValidVRegister());
+  return VRegister::SRegFromCode(reg_code);
}
+inline VRegister CPURegister::D() const {
+  DCHECK(IsValidVRegister());
+  return VRegister::DRegFromCode(reg_code);
+}
-inline FPRegister CPURegister::D() const {
-  DCHECK(IsValidFPRegister());
-  return FPRegister::DRegFromCode(reg_code);
+inline VRegister CPURegister::Q() const {
+  DCHECK(IsValidVRegister());
+  return VRegister::QRegFromCode(reg_code);
}
@@ -491,7 +535,7 @@ MemOperand::MemOperand(Register base, const Operand& offset, AddrMode addrmode)
    regoffset_ = NoReg;
  } else if (offset.IsShiftedRegister()) {
-    DCHECK(addrmode == Offset);
+    DCHECK((addrmode == Offset) || (addrmode == PostIndex));
    regoffset_ = offset.reg();
    shift_ = offset.shift();
@@ -877,21 +921,20 @@ LoadStoreOp Assembler::LoadOpFor(const CPURegister& rt) {
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x : LDR_w;
  } else {
-    DCHECK(rt.IsFPRegister());
-    return rt.Is64Bits() ? LDR_d : LDR_s;
-  }
-}
-
-
-LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt,
-                                         const CPURegister& rt2) {
-  DCHECK(AreSameSizeAndType(rt, rt2));
-  USE(rt2);
-  if (rt.IsRegister()) {
-    return rt.Is64Bits() ? LDP_x : LDP_w;
-  } else {
-    DCHECK(rt.IsFPRegister());
-    return rt.Is64Bits() ? LDP_d : LDP_s;
+    DCHECK(rt.IsVRegister());
+    switch (rt.SizeInBits()) {
+      case kBRegSizeInBits:
+        return LDR_b;
+      case kHRegSizeInBits:
+        return LDR_h;
+      case kSRegSizeInBits:
+        return LDR_s;
+      case kDRegSizeInBits:
+        return LDR_d;
+      default:
+        DCHECK(rt.IsQ());
+        return LDR_q;
+    }
  }
}
@@ -901,11 +944,29 @@ LoadStoreOp Assembler::StoreOpFor(const CPURegister& rt) {
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STR_x : STR_w;
  } else {
-    DCHECK(rt.IsFPRegister());
-    return rt.Is64Bits() ? STR_d : STR_s;
+    DCHECK(rt.IsVRegister());
+    switch (rt.SizeInBits()) {
+      case kBRegSizeInBits:
+        return STR_b;
+      case kHRegSizeInBits:
+        return STR_h;
+      case kSRegSizeInBits:
+        return STR_s;
+      case kDRegSizeInBits:
+        return STR_d;
+      default:
+        DCHECK(rt.IsQ());
+        return STR_q;
+    }
  }
}
+LoadStorePairOp Assembler::LoadPairOpFor(const CPURegister& rt,
+                                         const CPURegister& rt2) {
+  DCHECK_EQ(STP_w | LoadStorePairLBit, LDP_w);
+  return static_cast<LoadStorePairOp>(StorePairOpFor(rt, rt2) |
+                                      LoadStorePairLBit);
+}
LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt,
                                          const CPURegister& rt2) {
@@ -914,8 +975,16 @@ LoadStorePairOp Assembler::StorePairOpFor(const CPURegister& rt,
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? STP_x : STP_w;
  } else {
-    DCHECK(rt.IsFPRegister());
-    return rt.Is64Bits() ? STP_d : STP_s;
+    DCHECK(rt.IsVRegister());
+    switch (rt.SizeInBits()) {
+      case kSRegSizeInBits:
+        return STP_s;
+      case kDRegSizeInBits:
+        return STP_d;
+      default:
+        DCHECK(rt.IsQ());
+        return STP_q;
+    }
  }
}
@@ -924,7 +993,7 @@ LoadLiteralOp Assembler::LoadLiteralOpFor(const CPURegister& rt) {
  if (rt.IsRegister()) {
    return rt.Is64Bits() ? LDR_x_lit : LDR_w_lit;
  } else {
-    DCHECK(rt.IsFPRegister());
+    DCHECK(rt.IsVRegister());
    return rt.Is64Bits() ? LDR_d_lit : LDR_s_lit;
  }
}
@@ -1107,9 +1176,8 @@ Instr Assembler::ImmLS(int imm9) {
  return truncate_to_int9(imm9) << ImmLS_offset;
}
-
-Instr Assembler::ImmLSPair(int imm7, LSDataSize size) {
-  DCHECK(((imm7 >> size) << size) == imm7);
+Instr Assembler::ImmLSPair(int imm7, unsigned size) {
+  DCHECK_EQ((imm7 >> size) << size, imm7);
  int scaled_imm7 = imm7 >> size;
  DCHECK(is_int7(scaled_imm7));
  return truncate_to_int7(scaled_imm7) << ImmLSPair_offset;
@@ -1151,10 +1219,17 @@ Instr Assembler::ImmBarrierType(int imm2) {
  return imm2 << ImmBarrierType_offset;
}
-
-LSDataSize Assembler::CalcLSDataSize(LoadStoreOp op) {
-  DCHECK((SizeLS_offset + SizeLS_width) == (kInstructionSize * 8));
-  return static_cast<LSDataSize>(op >> SizeLS_offset);
+unsigned Assembler::CalcLSDataSize(LoadStoreOp op) {
+  DCHECK((LSSize_offset + LSSize_width) == (kInstructionSize * 8));
+  unsigned size = static_cast<Instr>(op >> LSSize_offset);
+  if ((op & LSVector_mask) != 0) {
+    // Vector register memory operations encode the access size in the "size"
+    // and "opc" fields.
+    if ((size == 0) && ((op & LSOpc_mask) >> LSOpc_offset) >= 2) {
+      size = kQRegSizeLog2;
+    }
+  }
+  return size;
}
@@ -1169,11 +1244,7 @@ Instr Assembler::ShiftMoveWide(int shift) {
  return shift << ShiftMoveWide_offset;
}
-
-Instr Assembler::FPType(FPRegister fd) {
-  return fd.Is64Bits() ? FP64 : FP32;
-}
-
+Instr Assembler::FPType(VRegister fd) { return fd.Is64Bits() ? FP64 : FP32; }
Instr Assembler::FPScale(unsigned scale) {
  DCHECK(is_uint6(scale));