| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 126 matching lines...) |
| 137 | 137 |
| 138 int32_t SignedBits(int msb, int lsb) const { | 138 int32_t SignedBits(int msb, int lsb) const { |
| 139 int32_t bits = *(reinterpret_cast<const int32_t*>(this)); | 139 int32_t bits = *(reinterpret_cast<const int32_t*>(this)); |
| 140 return signed_bitextract_32(msb, lsb, bits); | 140 return signed_bitextract_32(msb, lsb, bits); |
| 141 } | 141 } |
| 142 | 142 |
| 143 Instr Mask(uint32_t mask) const { | 143 Instr Mask(uint32_t mask) const { |
| 144 return InstructionBits() & mask; | 144 return InstructionBits() & mask; |
| 145 } | 145 } |
| 146 | 146 |
| 147 Instruction* following(int count = 1) { | 147 V8_INLINE Instruction* following(int count = 1) { |
| 148 return this + count * kInstructionSize; | 148 return InstructionAtOffset(count * static_cast<int>(kInstructionSize)); |
| 149 } | 149 } |
| 150 | 150 |
| 151 Instruction* preceding(int count = 1) { | 151 V8_INLINE Instruction* preceding(int count = 1) { |
| 152 return this - count * kInstructionSize; | 152 return following(-count); |
| 153 } | 153 } |
| 154 | 154 |
| 155 #define DEFINE_GETTER(Name, HighBit, LowBit, Func) \ | 155 #define DEFINE_GETTER(Name, HighBit, LowBit, Func) \ |
| 156 int64_t Name() const { return Func(HighBit, LowBit); } | 156 int64_t Name() const { return Func(HighBit, LowBit); } |
| 157 INSTRUCTION_FIELDS_LIST(DEFINE_GETTER) | 157 INSTRUCTION_FIELDS_LIST(DEFINE_GETTER) |
| 158 #undef DEFINE_GETTER | 158 #undef DEFINE_GETTER |
| 159 | 159 |
| 160 // ImmPCRel is a compound field (not present in INSTRUCTION_FIELDS_LIST), | 160 // ImmPCRel is a compound field (not present in INSTRUCTION_FIELDS_LIST), |
| 161 // formed from ImmPCRelLo and ImmPCRelHi. | 161 // formed from ImmPCRelLo and ImmPCRelHi. |
| 162 int ImmPCRel() const { | 162 int ImmPCRel() const { |
| (...skipping 197 matching lines...) |
| 360 // a PC-relative addressing instruction. | 360 // a PC-relative addressing instruction. |
| 361 void SetImmPCOffsetTarget(Instruction* target); | 361 void SetImmPCOffsetTarget(Instruction* target); |
| 362 // Patch a literal load instruction to load from 'source'. | 362 // Patch a literal load instruction to load from 'source'. |
| 363 void SetImmLLiteral(Instruction* source); | 363 void SetImmLLiteral(Instruction* source); |
| 364 | 364 |
| 365 uint8_t* LiteralAddress() { | 365 uint8_t* LiteralAddress() { |
| 366 int offset = ImmLLiteral() << kLiteralEntrySizeLog2; | 366 int offset = ImmLLiteral() << kLiteralEntrySizeLog2; |
| 367 return reinterpret_cast<uint8_t*>(this) + offset; | 367 return reinterpret_cast<uint8_t*>(this) + offset; |
| 368 } | 368 } |
| 369 | 369 |
| 370 Instruction* NextInstruction() { | 370 enum CheckAlignment { NO_CHECK, CHECK_ALIGNMENT }; |
| 371 return this + kInstructionSize; | 371 |
| | 372 V8_INLINE Instruction* InstructionAtOffset( |
| | 373 int64_t offset, |
| | 374 CheckAlignment check = CHECK_ALIGNMENT) { |
| | 375 Address addr = reinterpret_cast<Address>(this) + offset; |
| | 376 // The FUZZ_disasm test relies on no check being done. |
| | 377 ASSERT(check == NO_CHECK || IsAddressAligned(addr, kInstructionSize)); |
| | 378 return Cast(addr); |
| 372 } | 379 } |
| 373 | 380 |
| 374 Instruction* InstructionAtOffset(int64_t offset) { | 381 template<typename T> V8_INLINE static Instruction* Cast(T src) { |
| 375 ASSERT(IsAligned(reinterpret_cast<uintptr_t>(this) + offset, | 382 return reinterpret_cast<Instruction*>(src); |
| 376 kInstructionSize)); | |
| 377 return this + offset; | |
| 378 } | 383 } |
| 379 | 384 |
| 380 template<typename T> static Instruction* Cast(T src) { | 385 V8_INLINE ptrdiff_t DistanceTo(Instruction* target) { |
| 381 return reinterpret_cast<Instruction*>(src); | 386 return reinterpret_cast<Address>(target) - reinterpret_cast<Address>(this); |
| 382 } | 387 } |
| 383 | 388 |
| 384 | 389 |
| 385 void SetPCRelImmTarget(Instruction* target); | 390 void SetPCRelImmTarget(Instruction* target); |
| 386 void SetBranchImmTarget(Instruction* target); | 391 void SetBranchImmTarget(Instruction* target); |
| 387 }; | 392 }; |
| 388 | 393 |
| 389 | 394 |
| 390 // Where Instruction looks at instructions generated by the Assembler, | 395 // Where Instruction looks at instructions generated by the Assembler, |
| 391 // InstructionSequence looks at instructions sequences generated by the | 396 // InstructionSequence looks at instructions sequences generated by the |
| (...skipping 95 matching lines...) |
| 487 TRACE_ENABLE = 1 << 6, | 492 TRACE_ENABLE = 1 << 6, |
| 488 TRACE_DISABLE = 2 << 6, | 493 TRACE_DISABLE = 2 << 6, |
| 489 TRACE_OVERRIDE = 3 << 6 | 494 TRACE_OVERRIDE = 3 << 6 |
| 490 }; | 495 }; |
| 491 | 496 |
| 492 | 497 |
| 493 } } // namespace v8::internal | 498 } } // namespace v8::internal |
| 494 | 499 |
| 495 | 500 |
| 496 #endif // V8_A64_INSTRUCTIONS_A64_H_ | 501 #endif // V8_A64_INSTRUCTIONS_A64_H_ |
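
The unchanged getters near the top of the hunk (SignedBits, Mask, and the DEFINE_GETTER expansion) all reduce to pulling a bit range [msb:lsb] out of the 32-bit instruction word, either zero- or sign-extended. The real signed_bitextract_32 / unsigned_bitextract_32 helpers live elsewhere in the V8 tree and are not shown in this diff, so the following is only a standalone sketch of that kind of extraction, with illustrative bodies rather than V8's own:

```cpp
#include <cstdint>

// Extract bits [msb:lsb] of a 32-bit word, zero-extended.
inline uint32_t unsigned_bitextract_32(int msb, int lsb, uint32_t x) {
  // Field width is msb - lsb + 1 bits; build a mask of that many low bits.
  uint32_t width_mask = (msb - lsb == 31)
                            ? ~UINT32_C(0)
                            : (UINT32_C(1) << (msb - lsb + 1)) - 1;
  return (x >> lsb) & width_mask;
}

// Extract bits [msb:lsb] of a 32-bit word, sign-extended from bit msb.
inline int32_t signed_bitextract_32(int msb, int lsb, int32_t x) {
  uint32_t field = unsigned_bitextract_32(msb, lsb, static_cast<uint32_t>(x));
  uint32_t sign_bit = UINT32_C(1) << (msb - lsb);
  // Classic sign-extension trick: (field ^ sign) - sign.
  return static_cast<int32_t>((field ^ sign_bit) - sign_bit);
}
```

SignedBits() then just reinterprets the instruction bytes as an int32_t and hands msb/lsb to such a helper, which is also what each DEFINE_GETTER-generated accessor does for its named field.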
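The bulk of the new side routes following(), preceding(), and the old NextInstruction() through a single byte-offset helper, InstructionAtOffset(), with an optional alignment check, plus a Cast() helper and a byte-based DistanceTo(). Below is a minimal standalone sketch of that shape, not the V8 sources: it assumes a 4-byte kInstrSize and uses plain assert, uintptr_t, and uint8_t* in place of V8's ASSERT, IsAddressAligned, and Address types.

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>

class Instruction {
 public:
  // A64 instructions are a fixed 4 bytes wide (assumption mirroring the patch).
  static const int kInstrSize = 4;

  enum CheckAlignment { NO_CHECK, CHECK_ALIGNMENT };

  // All navigation funnels through one helper that works in bytes, so the
  // result does not depend on sizeof(Instruction), and alignment can be
  // asserted in exactly one place.
  Instruction* InstructionAtOffset(std::ptrdiff_t offset,
                                   CheckAlignment check = CHECK_ALIGNMENT) {
    std::uintptr_t addr = reinterpret_cast<std::uintptr_t>(this) + offset;
    // Callers such as the fuzzed-disassembly path can pass NO_CHECK to skip
    // the alignment assertion.
    assert(check == NO_CHECK || (addr % kInstrSize) == 0);
    return Cast(addr);
  }

  Instruction* following(int count = 1) {
    return InstructionAtOffset(count * kInstrSize);
  }

  // Stepping backwards reuses following(), keeping one code path.
  Instruction* preceding(int count = 1) { return following(-count); }

  template <typename T>
  static Instruction* Cast(T src) {
    return reinterpret_cast<Instruction*>(src);
  }

  // Byte distance to 'target'; negative if 'target' is behind this one.
  std::ptrdiff_t DistanceTo(Instruction* target) {
    return reinterpret_cast<std::uint8_t*>(target) -
           reinterpret_cast<std::uint8_t*>(this);
  }
};
```

A decoder loop written against this shape would simply advance with instr = instr->following(), and any pc-relative patching code can compute branch offsets with DistanceTo() instead of raw pointer subtraction on Instruction*.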