| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 91 matching lines...) |
| 102 return true; | 102 return true; |
| 103 } else { | 103 } else { |
| 104 ASSERT(IsNone()); | 104 ASSERT(IsNone()); |
| 105 return false; | 105 return false; |
| 106 } | 106 } |
| 107 } | 107 } |
| 108 | 108 |
| 109 | 109 |
| 110 inline bool CPURegister::IsValidRegister() const { | 110 inline bool CPURegister::IsValidRegister() const { |
| 111 return IsRegister() && | 111 return IsRegister() && |
| 112 ((reg_size == kWRegSize) || (reg_size == kXRegSize)) && | 112 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) && |
| 113 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); | 113 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); |
| 114 } | 114 } |
| 115 | 115 |
| 116 | 116 |
| 117 inline bool CPURegister::IsValidFPRegister() const { | 117 inline bool CPURegister::IsValidFPRegister() const { |
| 118 return IsFPRegister() && | 118 return IsFPRegister() && |
| 119 ((reg_size == kSRegSize) || (reg_size == kDRegSize)) && | 119 ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) && |
| 120 (reg_code < kNumberOfFPRegisters); | 120 (reg_code < kNumberOfFPRegisters); |
| 121 } | 121 } |
| 122 | 122 |
| 123 | 123 |
| 124 inline bool CPURegister::IsNone() const { | 124 inline bool CPURegister::IsNone() const { |
| 125 // kNoRegister types should always have size 0 and code 0. | 125 // kNoRegister types should always have size 0 and code 0. |
| 126 ASSERT((reg_type != kNoRegister) || (reg_code == 0)); | 126 ASSERT((reg_type != kNoRegister) || (reg_code == 0)); |
| 127 ASSERT((reg_type != kNoRegister) || (reg_size == 0)); | 127 ASSERT((reg_type != kNoRegister) || (reg_size == 0)); |
| 128 | 128 |
| 129 return reg_type == kNoRegister; | 129 return reg_type == kNoRegister; |
| (...skipping 84 matching lines...) |
| 214 ASSERT(IsValid()); | 214 ASSERT(IsValid()); |
| 215 ASSERT(CPURegister::Create(code, size_, type_).IsValid()); | 215 ASSERT(CPURegister::Create(code, size_, type_).IsValid()); |
| 216 list_ &= ~(1UL << code); | 216 list_ &= ~(1UL << code); |
| 217 } | 217 } |
| 218 | 218 |
| 219 | 219 |
| 220 inline Register Register::XRegFromCode(unsigned code) { | 220 inline Register Register::XRegFromCode(unsigned code) { |
| 221 // This function returns the zero register when code = 31. The stack pointer | 221 // This function returns the zero register when code = 31. The stack pointer |
| 222 // can not be returned. | 222 // can not be returned. |
| 223 ASSERT(code < kNumberOfRegisters); | 223 ASSERT(code < kNumberOfRegisters); |
| 224 return Register::Create(code, kXRegSize); | 224 return Register::Create(code, kXRegSizeInBits); |
| 225 } | 225 } |
| 226 | 226 |
| 227 | 227 |
| 228 inline Register Register::WRegFromCode(unsigned code) { | 228 inline Register Register::WRegFromCode(unsigned code) { |
| 229 ASSERT(code < kNumberOfRegisters); | 229 ASSERT(code < kNumberOfRegisters); |
| 230 return Register::Create(code, kWRegSize); | 230 return Register::Create(code, kWRegSizeInBits); |
| 231 } | 231 } |
| 232 | 232 |
| 233 | 233 |
| 234 inline FPRegister FPRegister::SRegFromCode(unsigned code) { | 234 inline FPRegister FPRegister::SRegFromCode(unsigned code) { |
| 235 ASSERT(code < kNumberOfFPRegisters); | 235 ASSERT(code < kNumberOfFPRegisters); |
| 236 return FPRegister::Create(code, kSRegSize); | 236 return FPRegister::Create(code, kSRegSizeInBits); |
| 237 } | 237 } |
| 238 | 238 |
| 239 | 239 |
| 240 inline FPRegister FPRegister::DRegFromCode(unsigned code) { | 240 inline FPRegister FPRegister::DRegFromCode(unsigned code) { |
| 241 ASSERT(code < kNumberOfFPRegisters); | 241 ASSERT(code < kNumberOfFPRegisters); |
| 242 return FPRegister::Create(code, kDRegSize); | 242 return FPRegister::Create(code, kDRegSizeInBits); |
| 243 } | 243 } |
| 244 | 244 |
| 245 | 245 |
| 246 inline Register CPURegister::W() const { | 246 inline Register CPURegister::W() const { |
| 247 ASSERT(IsValidRegister()); | 247 ASSERT(IsValidRegister()); |
| 248 return Register::WRegFromCode(reg_code); | 248 return Register::WRegFromCode(reg_code); |
| 249 } | 249 } |
| 250 | 250 |
| 251 | 251 |
| 252 inline Register CPURegister::X() const { | 252 inline Register CPURegister::X() const { |
| (...skipping 74 matching lines...) |
| 327 STATIC_ASSERT(OperandInitializer<T>::kIsIntType); | 327 STATIC_ASSERT(OperandInitializer<T>::kIsIntType); |
| 328 } | 328 } |
| 329 | 329 |
| 330 | 330 |
| 331 Operand::Operand(Register reg, Shift shift, unsigned shift_amount) | 331 Operand::Operand(Register reg, Shift shift, unsigned shift_amount) |
| 332 : reg_(reg), | 332 : reg_(reg), |
| 333 shift_(shift), | 333 shift_(shift), |
| 334 extend_(NO_EXTEND), | 334 extend_(NO_EXTEND), |
| 335 shift_amount_(shift_amount), | 335 shift_amount_(shift_amount), |
| 336 rmode_(reg.Is64Bits() ? RelocInfo::NONE64 : RelocInfo::NONE32) { | 336 rmode_(reg.Is64Bits() ? RelocInfo::NONE64 : RelocInfo::NONE32) { |
| 337 ASSERT(reg.Is64Bits() || (shift_amount < kWRegSize)); | 337 ASSERT(reg.Is64Bits() || (shift_amount < kWRegSizeInBits)); |
| 338 ASSERT(reg.Is32Bits() || (shift_amount < kXRegSize)); | 338 ASSERT(reg.Is32Bits() || (shift_amount < kXRegSizeInBits)); |
| 339 ASSERT(!reg.IsSP()); | 339 ASSERT(!reg.IsSP()); |
| 340 } | 340 } |
| 341 | 341 |
| 342 | 342 |
| 343 Operand::Operand(Register reg, Extend extend, unsigned shift_amount) | 343 Operand::Operand(Register reg, Extend extend, unsigned shift_amount) |
| 344 : reg_(reg), | 344 : reg_(reg), |
| 345 shift_(NO_SHIFT), | 345 shift_(NO_SHIFT), |
| 346 extend_(extend), | 346 extend_(extend), |
| 347 shift_amount_(shift_amount), | 347 shift_amount_(shift_amount), |
| 348 rmode_(reg.Is64Bits() ? RelocInfo::NONE64 : RelocInfo::NONE32) { | 348 rmode_(reg.Is64Bits() ? RelocInfo::NONE64 : RelocInfo::NONE32) { |
| (...skipping 650 matching lines...) |
| 999 ASSERT(IsImmAddSub(imm)); | 999 ASSERT(IsImmAddSub(imm)); |
| 1000 if (is_uint12(imm)) { // No shift required. | 1000 if (is_uint12(imm)) { // No shift required. |
| 1001 return imm << ImmAddSub_offset; | 1001 return imm << ImmAddSub_offset; |
| 1002 } else { | 1002 } else { |
| 1003 return ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset); | 1003 return ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset); |
| 1004 } | 1004 } |
| 1005 } | 1005 } |
| 1006 | 1006 |
| 1007 | 1007 |
| 1008 Instr Assembler::ImmS(unsigned imms, unsigned reg_size) { | 1008 Instr Assembler::ImmS(unsigned imms, unsigned reg_size) { |
| 1009 ASSERT(((reg_size == kXRegSize) && is_uint6(imms)) || | 1009 ASSERT(((reg_size == kXRegSizeInBits) && is_uint6(imms)) || |
| 1010 ((reg_size == kWRegSize) && is_uint5(imms))); | 1010 ((reg_size == kWRegSizeInBits) && is_uint5(imms))); |
| 1011 USE(reg_size); | 1011 USE(reg_size); |
| 1012 return imms << ImmS_offset; | 1012 return imms << ImmS_offset; |
| 1013 } | 1013 } |
| 1014 | 1014 |
| 1015 | 1015 |
| 1016 Instr Assembler::ImmR(unsigned immr, unsigned reg_size) { | 1016 Instr Assembler::ImmR(unsigned immr, unsigned reg_size) { |
| 1017 ASSERT(((reg_size == kXRegSize) && is_uint6(immr)) || | 1017 ASSERT(((reg_size == kXRegSizeInBits) && is_uint6(immr)) || |
| 1018 ((reg_size == kWRegSize) && is_uint5(immr))); | 1018 ((reg_size == kWRegSizeInBits) && is_uint5(immr))); |
| 1019 USE(reg_size); | 1019 USE(reg_size); |
| 1020 ASSERT(is_uint6(immr)); | 1020 ASSERT(is_uint6(immr)); |
| 1021 return immr << ImmR_offset; | 1021 return immr << ImmR_offset; |
| 1022 } | 1022 } |
| 1023 | 1023 |
| 1024 | 1024 |
| 1025 Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) { | 1025 Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) { |
| 1026 ASSERT((reg_size == kWRegSize) || (reg_size == kXRegSize)); | 1026 ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)); |
| 1027 ASSERT(is_uint6(imms)); | 1027 ASSERT(is_uint6(imms)); |
| 1028 ASSERT((reg_size == kXRegSize) || is_uint6(imms + 3)); | 1028 ASSERT((reg_size == kXRegSizeInBits) || is_uint6(imms + 3)); |
| 1029 USE(reg_size); | 1029 USE(reg_size); |
| 1030 return imms << ImmSetBits_offset; | 1030 return imms << ImmSetBits_offset; |
| 1031 } | 1031 } |
| 1032 | 1032 |
| 1033 | 1033 |
| 1034 Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) { | 1034 Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) { |
| 1035 ASSERT((reg_size == kWRegSize) || (reg_size == kXRegSize)); | 1035 ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)); |
| 1036 ASSERT(((reg_size == kXRegSize) && is_uint6(immr)) || | 1036 ASSERT(((reg_size == kXRegSizeInBits) && is_uint6(immr)) || |
| 1037 ((reg_size == kWRegSize) && is_uint5(immr))); | 1037 ((reg_size == kWRegSizeInBits) && is_uint5(immr))); |
| 1038 USE(reg_size); | 1038 USE(reg_size); |
| 1039 return immr << ImmRotate_offset; | 1039 return immr << ImmRotate_offset; |
| 1040 } | 1040 } |
| 1041 | 1041 |
| 1042 | 1042 |
| 1043 Instr Assembler::ImmLLiteral(int imm19) { | 1043 Instr Assembler::ImmLLiteral(int imm19) { |
| 1044 CHECK(is_int19(imm19)); | 1044 CHECK(is_int19(imm19)); |
| 1045 return truncate_to_int19(imm19) << ImmLLiteral_offset; | 1045 return truncate_to_int19(imm19) << ImmLLiteral_offset; |
| 1046 } | 1046 } |
| 1047 | 1047 |
| 1048 | 1048 |
| 1049 Instr Assembler::BitN(unsigned bitn, unsigned reg_size) { | 1049 Instr Assembler::BitN(unsigned bitn, unsigned reg_size) { |
| 1050 ASSERT((reg_size == kWRegSize) || (reg_size == kXRegSize)); | 1050 ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)); |
| 1051 ASSERT((reg_size == kXRegSize) || (bitn == 0)); | 1051 ASSERT((reg_size == kXRegSizeInBits) || (bitn == 0)); |
| 1052 USE(reg_size); | 1052 USE(reg_size); |
| 1053 return bitn << BitN_offset; | 1053 return bitn << BitN_offset; |
| 1054 } | 1054 } |
| 1055 | 1055 |
| 1056 | 1056 |
| 1057 Instr Assembler::ShiftDP(Shift shift) { | 1057 Instr Assembler::ShiftDP(Shift shift) { |
| 1058 ASSERT(shift == LSL || shift == LSR || shift == ASR || shift == ROR); | 1058 ASSERT(shift == LSL || shift == LSR || shift == ASR || shift == ROR); |
| 1059 return shift << ShiftDP_offset; | 1059 return shift << ShiftDP_offset; |
| 1060 } | 1060 } |
| 1061 | 1061 |
| (...skipping 142 matching lines...) |
| 1204 | 1204 |
| 1205 | 1205 |
| 1206 void Assembler::ClearRecordedAstId() { | 1206 void Assembler::ClearRecordedAstId() { |
| 1207 recorded_ast_id_ = TypeFeedbackId::None(); | 1207 recorded_ast_id_ = TypeFeedbackId::None(); |
| 1208 } | 1208 } |
| 1209 | 1209 |
| 1210 | 1210 |
| 1211 } } // namespace v8::internal | 1211 } } // namespace v8::internal |
| 1212 | 1212 |
| 1213 #endif // V8_A64_ASSEMBLER_A64_INL_H_ | 1213 #endif // V8_A64_ASSEMBLER_A64_INL_H_ |
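
Note for context on the rename above: the following is a minimal, self-contained sketch, not V8 source. The constant names mirror the patch, but the helper functions, exact values, and the `sketch` namespace are assumptions for illustration. It shows why the assertions distinguish kWRegSizeInBits from kXRegSizeInBits (bitfield immediates such as ImmS/ImmR get a 6-bit field for X-sized operations and a 5-bit field for W-sized ones) and how the add/sub immediate split (12 bits, optionally shifted left by 12) works.

```cpp
// Minimal sketch (not V8 source): assumed constants and checks mirroring the
// *SizeInBits rename and the immediate-width assertions in the diff above.
#include <cassert>
#include <cstdint>

namespace sketch {

// Assumed values: an A64 W register is 32 bits wide, an X register 64.
const unsigned kWRegSizeInBits = 32;
const unsigned kXRegSizeInBits = 64;

inline bool is_uint5(uint64_t x)  { return x < (1u << 5); }
inline bool is_uint6(uint64_t x)  { return x < (1u << 6); }
inline bool is_uint12(uint64_t x) { return x < (1u << 12); }

// Mirrors the ImmS/ImmR pattern: the field is 6 bits wide for a 64-bit (X)
// operation and 5 bits wide for a 32-bit (W) operation.
inline bool BitfieldImmFitsForSize(unsigned imm, unsigned reg_size_in_bits) {
  assert((reg_size_in_bits == kWRegSizeInBits) ||
         (reg_size_in_bits == kXRegSizeInBits));
  return (reg_size_in_bits == kXRegSizeInBits) ? is_uint6(imm)
                                               : is_uint5(imm);
}

// Mirrors the IsImmAddSub/ImmAddSub split: an add/sub immediate is either a
// plain 12-bit value or a 12-bit value shifted left by 12.
inline bool IsAddSubImmediate(uint64_t imm) {
  return is_uint12(imm) || (is_uint12(imm >> 12) && ((imm & 0xfff) == 0));
}

}  // namespace sketch
```

This only illustrates the encoding constraints the ASSERTs enforce; the actual field offsets and Create() plumbing live in the real assembler-a64 headers.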