| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #ifndef V8_ARM64_ASSEMBLER_ARM64_H_ | 5 #ifndef V8_ARM64_ASSEMBLER_ARM64_H_ |
| 6 #define V8_ARM64_ASSEMBLER_ARM64_H_ | 6 #define V8_ARM64_ASSEMBLER_ARM64_H_ |
| 7 | 7 |
| 8 #include <deque> | 8 #include <deque> |
| 9 #include <list> | 9 #include <list> |
| 10 #include <map> | 10 #include <map> |
| (...skipping 784 matching lines...) |
| 795 int64_t offset_; | 795 int64_t offset_; |
| 796 AddrMode addrmode_; | 796 AddrMode addrmode_; |
| 797 Shift shift_; | 797 Shift shift_; |
| 798 Extend extend_; | 798 Extend extend_; |
| 799 unsigned shift_amount_; | 799 unsigned shift_amount_; |
| 800 }; | 800 }; |
| 801 | 801 |
| 802 | 802 |
| 803 class ConstPool { | 803 class ConstPool { |
| 804 public: | 804 public: |
| 805 explicit ConstPool(Assembler* assm) | 805 explicit ConstPool(Assembler* assm) : assm_(assm), first_use_(-1) {} |
| 806 : assm_(assm), | 806 // Returns true when we need to write RelocInfo and false when we do not. |
| 807 first_use_(-1), | 807 bool RecordEntry(intptr_t data, RelocInfo::Mode mode); |
| 808 shared_entries_count(0) {} | 808 int EntryCount() const { return static_cast<int>(entries_.size()); } |
| 809 void RecordEntry(intptr_t data, RelocInfo::Mode mode); | 809 bool IsEmpty() const { return entries_.empty(); } |
| 810 int EntryCount() const { | |
| 811 return shared_entries_count + static_cast<int>(unique_entries_.size()); | |
| 812 } | |
| 813 bool IsEmpty() const { | |
| 814 return shared_entries_.empty() && unique_entries_.empty(); | |
| 815 } | |
| 816 // Distance in bytes between the current pc and the first instruction | 810 // Distance in bytes between the current pc and the first instruction |
| 817 // using the pool. If there are no pending entries return kMaxInt. | 811 // using the pool. If there are no pending entries return kMaxInt. |
| 818 int DistanceToFirstUse(); | 812 int DistanceToFirstUse(); |
| 819 // Offset after which instructions using the pool will be out of range. | 813 // Offset after which instructions using the pool will be out of range. |
| 820 int MaxPcOffset(); | 814 int MaxPcOffset(); |
| 821 // Maximum size the constant pool can be with current entries. It always | 815 // Maximum size the constant pool can be with current entries. It always |
| 822 // includes alignment padding and branch over. | 816 // includes alignment padding and branch over. |
| 823 int WorstCaseSize(); | 817 int WorstCaseSize(); |
| 824 // Size in bytes of the literal pool *if* it is emitted at the current | 818 // Size in bytes of the literal pool *if* it is emitted at the current |
| 825 // pc. The size will include the branch over the pool if it was requested. | 819 // pc. The size will include the branch over the pool if it was requested. |
| 826 int SizeIfEmittedAtCurrentPc(bool require_jump); | 820 int SizeIfEmittedAtCurrentPc(bool require_jump); |
| 827 // Emit the literal pool at the current pc with a branch over the pool if | 821 // Emit the literal pool at the current pc with a branch over the pool if |
| 828 // requested. | 822 // requested. |
| 829 void Emit(bool require_jump); | 823 void Emit(bool require_jump); |
| 830 // Discard any pending pool entries. | 824 // Discard any pending pool entries. |
| 831 void Clear(); | 825 void Clear(); |
| 832 | 826 |
| 833 private: | 827 private: |
| 834 bool CanBeShared(RelocInfo::Mode mode); | 828 bool CanBeShared(RelocInfo::Mode mode); |
| 835 void EmitMarker(); | 829 void EmitMarker(); |
| 836 void EmitGuard(); | 830 void EmitGuard(); |
| 837 void EmitEntries(); | 831 void EmitEntries(); |
| 838 | 832 |
| 833 typedef std::map<uint64_t, int> SharedEntryMap; |
| 834 // Adds a shared entry to entries_, using 'entry_map' to determine whether we |
| 835 // already track this entry. Returns true if this is the first time we add |
| 836 // this entry, false otherwise. |
| 837 bool AddSharedEntry(SharedEntryMap& entry_map, uint64_t data, int offset); |
| 838 |
| 839 Assembler* assm_; | 839 Assembler* assm_; |
| 840 // Keep track of the first instruction requiring a constant pool entry | 840 // Keep track of the first instruction requiring a constant pool entry |
| 841 // since the previous constant pool was emitted. | 841 // since the previous constant pool was emitted. |
| 842 int first_use_; | 842 int first_use_; |
| 843 // values, pc offset(s) of entries which can be shared. | 843 |
| 844 std::multimap<uint64_t, int> shared_entries_; | 844 // Map of data to index in entries_ for shared entries. |
| 845 // Number of distinct literal in shared entries. | 845 SharedEntryMap shared_entries_; |
| 846 int shared_entries_count; | 846 |
| 847 // values, pc offset of entries which cannot be shared. | 847 // Map of address of handle to index in entries_. We need to keep track of |
| 848 std::vector<std::pair<uint64_t, int> > unique_entries_; | 848 // code targets separately from other shared entries, as they can be |
| 849 // relocated. |
| 850 SharedEntryMap handle_to_index_map_; |
| 851 |
| 852 // Values, pc offset(s) of entries. Use a vector to preserve the order of |
| 853 // insertion, as the serializer expects code target RelocInfo to point to |
| 854 // constant pool addresses in an ascending order. |
| 855 std::vector<std::pair<uint64_t, std::vector<int> > > entries_; |
| 849 }; | 856 }; |
| 850 | 857 |
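Editor's note: the reworked ConstPool above replaces the shared multimap / unique vector split with one insertion-ordered entries_ vector plus dedup maps. Below is a minimal standalone sketch of that bookkeeping, assuming simplified types (ConstPoolSketch, RecordSharedEntry, and the main() driver are hypothetical; AddSharedEntry, shared_entries_, handle_to_index_map_, and entries_ mirror the patch):

    #include <cassert>
    #include <cstdint>
    #include <map>
    #include <utility>
    #include <vector>

    class ConstPoolSketch {
     public:
      // First use of a shared value returns true (the caller would need to
      // write RelocInfo); later uses reuse the slot and return false.
      bool RecordSharedEntry(uint64_t data, int pc_offset) {
        return AddSharedEntry(shared_entries_, data, pc_offset);
      }

      int EntryCount() const { return static_cast<int>(entries_.size()); }
      bool IsEmpty() const { return entries_.empty(); }

     private:
      typedef std::map<uint64_t, int> SharedEntryMap;

      // Mirrors the patch: dedupe via 'entry_map', which maps a value to its
      // index in entries_; each entry keeps every pc offset that uses it.
      bool AddSharedEntry(SharedEntryMap& entry_map, uint64_t data, int offset) {
        SharedEntryMap::iterator it = entry_map.find(data);
        if (it == entry_map.end()) {
          // First use: allocate a new pool slot and remember its index.
          entry_map[data] = static_cast<int>(entries_.size());
          entries_.push_back(std::make_pair(data, std::vector<int>(1, offset)));
          return true;
        }
        // Reuse: record one more pc offset against the existing slot.
        entries_[it->second].second.push_back(offset);
        return false;
      }

      SharedEntryMap shared_entries_;       // value -> index in entries_
      SharedEntryMap handle_to_index_map_;  // handle address -> index in entries_
      // Insertion order is preserved so code-target RelocInfo can point at
      // ascending constant pool addresses, as the serializer expects.
      std::vector<std::pair<uint64_t, std::vector<int> > > entries_;
    };

    int main() {
      ConstPoolSketch pool;
      assert(pool.RecordSharedEntry(0x1234, 0));   // first use: new slot
      assert(!pool.RecordSharedEntry(0x1234, 8));  // reuse: same slot
      assert(pool.EntryCount() == 1);
      return 0;
    }

Keeping a single insertion-ordered vector is what lets the serializer assume code-target RelocInfo points at ascending constant pool addresses.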
| 851 | 858 |
| 852 // ----------------------------------------------------------------------------- | 859 // ----------------------------------------------------------------------------- |
| 853 // Assembler. | 860 // Assembler. |
| 854 | 861 |
| 855 class Assembler : public AssemblerBase { | 862 class Assembler : public AssemblerBase { |
| 856 public: | 863 public: |
| 857 // Create an assembler. Instructions and relocation information are emitted | 864 // Create an assembler. Instructions and relocation information are emitted |
| 858 // into a buffer, with the instructions starting from the beginning and the | 865 // into a buffer, with the instructions starting from the beginning and the |
| (...skipping 141 matching lines...) |
| 1000 | 1007 |
| 1001 static constexpr int kPatchDebugBreakSlotAddressOffset = 0; | 1008 static constexpr int kPatchDebugBreakSlotAddressOffset = 0; |
| 1002 | 1009 |
| 1003 // Number of instructions necessary to be able to later patch it to a call. | 1010 // Number of instructions necessary to be able to later patch it to a call. |
| 1004 static constexpr int kDebugBreakSlotInstructions = 5; | 1011 static constexpr int kDebugBreakSlotInstructions = 5; |
| 1005 static constexpr int kDebugBreakSlotLength = | 1012 static constexpr int kDebugBreakSlotLength = |
| 1006 kDebugBreakSlotInstructions * kInstructionSize; | 1013 kDebugBreakSlotInstructions * kInstructionSize; |
| 1007 | 1014 |
| 1008 // Prevent constant pool emission until EndBlockConstPool is called. | 1015 // Prevent constant pool emission until EndBlockConstPool is called. |
| 1009 // Calls to this function can be nested but must be followed by an equal | 1016 // Calls to this function can be nested but must be followed by an equal |
| 1010 // number of call to EndBlockConstPool. | 1017 // number of calls to EndBlockConstPool. |
| 1011 void StartBlockConstPool(); | 1018 void StartBlockConstPool(); |
| 1012 | 1019 |
| 1013 // Resume constant pool emission. Needs to be called as many times as | 1020 // Resume constant pool emission. Needs to be called as many times as |
| 1014 // StartBlockConstPool to have an effect. | 1021 // StartBlockConstPool to have an effect. |
| 1015 void EndBlockConstPool(); | 1022 void EndBlockConstPool(); |
| 1016 | 1023 |
| 1017 bool is_const_pool_blocked() const; | 1024 bool is_const_pool_blocked() const; |
| 1018 static bool IsConstantPoolAt(Instruction* instr); | 1025 static bool IsConstantPoolAt(Instruction* instr); |
| 1019 static int ConstantPoolSizeAt(Instruction* instr); | 1026 static int ConstantPoolSizeAt(Instruction* instr); |
| 1020 // See Assembler::CheckConstPool for more info. | 1027 // See Assembler::CheckConstPool for more info. |
| 1021 void EmitPoolGuard(); | 1028 void EmitPoolGuard(); |
| 1022 | 1029 |
| 1023 // Prevent veneer pool emission until EndBlockVeneerPool is called. | 1030 // Prevent veneer pool emission until EndBlockVeneerPool is called. |
| 1024 // Calls to this function can be nested but must be followed by an equal | 1031 // Calls to this function can be nested but must be followed by an equal |
| 1025 // number of call to EndBlockVeneerPool. | 1032 // number of calls to EndBlockVeneerPool. |
| 1026 void StartBlockVeneerPool(); | 1033 void StartBlockVeneerPool(); |
| 1027 | 1034 |
| 1028 // Resume veneer pool emission. Needs to be called as many times as | 1035 // Resume veneer pool emission. Needs to be called as many times as |
| 1029 // StartBlockVeneerPool to have an effect. | 1036 // StartBlockVeneerPool to have an effect. |
| 1030 void EndBlockVeneerPool(); | 1037 void EndBlockVeneerPool(); |
| 1031 | 1038 |
| 1032 bool is_veneer_pool_blocked() const { | 1039 bool is_veneer_pool_blocked() const { |
| 1033 return veneer_pool_blocked_nesting_ > 0; | 1040 return veneer_pool_blocked_nesting_ > 0; |
| 1034 } | 1041 } |
| 1035 | 1042 |
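Editor's note: Start/EndBlockConstPool and the veneer-pool pair are plain nesting counters, so blocking scopes can overlap freely. A hedged sketch of that pattern (PoolBlockerSketch and main() are illustrative; the real Assembler also tracks a no-emission pc threshold, omitted here):

    #include <cassert>

    class PoolBlockerSketch {
     public:
      void StartBlockConstPool() { ++const_pool_blocked_nesting_; }
      void EndBlockConstPool() { --const_pool_blocked_nesting_; }
      bool is_const_pool_blocked() const {
        return const_pool_blocked_nesting_ > 0;
      }

     private:
      int const_pool_blocked_nesting_ = 0;
    };

    int main() {
      PoolBlockerSketch a;
      a.StartBlockConstPool();            // nesting == 1: blocked
      a.StartBlockConstPool();            // nesting == 2: still blocked (nested)
      a.EndBlockConstPool();              // nesting == 1: still blocked
      assert(a.is_const_pool_blocked());
      a.EndBlockConstPool();              // nesting == 0: emission may resume
      assert(!a.is_const_pool_blocked());
      return 0;
    }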
| (...skipping 2140 matching lines...) |
| 3176 ~BlockPoolsScope() { | 3183 ~BlockPoolsScope() { |
| 3177 assem_->EndBlockPools(); | 3184 assem_->EndBlockPools(); |
| 3178 } | 3185 } |
| 3179 | 3186 |
| 3180 private: | 3187 private: |
| 3181 Assembler* assem_; | 3188 Assembler* assem_; |
| 3182 | 3189 |
| 3183 DISALLOW_IMPLICIT_CONSTRUCTORS(BlockPoolsScope); | 3190 DISALLOW_IMPLICIT_CONSTRUCTORS(BlockPoolsScope); |
| 3184 }; | 3191 }; |
| 3185 | 3192 |
| 3193 // Class for blocking sharing of code targets in the constant pool. |
| 3194 class BlockCodeTargetSharingScope { |
| 3195 public: |
| 3196 explicit BlockCodeTargetSharingScope(Assembler* assem) : assem_(nullptr) { |
| 3197 Open(assem); |
| 3198 } |
| 3199 // This constructor does not initialize the scope. The user needs to |
| 3200 // explicitly call Open() before using it. |
| 3201 BlockCodeTargetSharingScope() : assem_(nullptr) {} |
| 3202 ~BlockCodeTargetSharingScope() { Close(); } |
| 3203 void Open(Assembler* assem) { |
| 3204 DCHECK_NULL(assem_); |
| 3205 DCHECK_NOT_NULL(assem); |
| 3206 assem_ = assem; |
| 3207 assem_->StartBlockCodeTargetSharing(); |
| 3208 } |
| 3209 |
| 3210 private: |
| 3211 void Close() { |
| 3212 if (assem_ != nullptr) { |
| 3213 assem_->EndBlockCodeTargetSharing(); |
| 3214 } |
| 3215 } |
| 3216 Assembler* assem_; |
| 3217 |
| 3218 DISALLOW_COPY_AND_ASSIGN(BlockCodeTargetSharingScope); |
| 3219 }; |
| 3220 |
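Editor's note: a usage sketch of the two construction styles BlockCodeTargetSharingScope supports, with Assembler replaced by a hypothetical counter holder so the example stands alone (MockAssembler, ScopeSketch, and main() are assumptions; the Open/Close protocol mirrors the patch):

    #include <cassert>

    struct MockAssembler {
      int nesting = 0;
      void StartBlockCodeTargetSharing() { ++nesting; }
      void EndBlockCodeTargetSharing() { --nesting; }
    };

    class ScopeSketch {
     public:
      explicit ScopeSketch(MockAssembler* assem) : assem_(nullptr) {
        Open(assem);
      }
      ScopeSketch() : assem_(nullptr) {}  // Deferred: call Open() later.
      ~ScopeSketch() {
        // Close() tolerates a scope that was never opened.
        if (assem_ != nullptr) assem_->EndBlockCodeTargetSharing();
      }
      void Open(MockAssembler* assem) {
        assert(assem_ == nullptr);  // Stands in for DCHECK_NULL.
        assem_ = assem;
        assem_->StartBlockCodeTargetSharing();
      }

     private:
      MockAssembler* assem_;
    };

    int main() {
      MockAssembler masm;
      {
        ScopeSketch immediate(&masm);  // Opens in the constructor.
        assert(masm.nesting == 1);
        ScopeSketch deferred;          // Not yet open; no effect.
        deferred.Open(&masm);          // Now nesting == 2.
        assert(masm.nesting == 2);
      }                                // Both destructors close their scope.
      assert(masm.nesting == 0);
      return 0;
    }

The deferred Open() form lets a scope be declared first and armed later; a default-constructed, never-opened scope stays a no-op because Close() checks for null.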
| 3186 protected: | 3221 protected: |
| 3187 inline const Register& AppropriateZeroRegFor(const CPURegister& reg) const; | 3222 inline const Register& AppropriateZeroRegFor(const CPURegister& reg) const; |
| 3188 | 3223 |
| 3189 void LoadStore(const CPURegister& rt, | 3224 void LoadStore(const CPURegister& rt, |
| 3190 const MemOperand& addr, | 3225 const MemOperand& addr, |
| 3191 LoadStoreOp op); | 3226 LoadStoreOp op); |
| 3192 void LoadStorePair(const CPURegister& rt, const CPURegister& rt2, | 3227 void LoadStorePair(const CPURegister& rt, const CPURegister& rt2, |
| 3193 const MemOperand& addr, LoadStorePairOp op); | 3228 const MemOperand& addr, LoadStorePairOp op); |
| 3194 void LoadStoreStruct(const VRegister& vt, const MemOperand& addr, | 3229 void LoadStoreStruct(const VRegister& vt, const MemOperand& addr, |
| 3195 NEONLoadStoreMultiStructOp op); | 3230 NEONLoadStoreMultiStructOp op); |
| (...skipping 65 matching lines...) |
| 3261 const CPURegister& rt2); | 3296 const CPURegister& rt2); |
| 3262 static inline LoadLiteralOp LoadLiteralOpFor(const CPURegister& rt); | 3297 static inline LoadLiteralOp LoadLiteralOpFor(const CPURegister& rt); |
| 3263 | 3298 |
| 3264 // Remove the specified branch from the unbound label link chain. | 3299 // Remove the specified branch from the unbound label link chain. |
| 3265 // If available, a veneer for this label can be used for other branches in the | 3300 // If available, a veneer for this label can be used for other branches in the |
| 3266 // chain if the link chain cannot be fixed up without this branch. | 3301 // chain if the link chain cannot be fixed up without this branch. |
| 3267 void RemoveBranchFromLabelLinkChain(Instruction* branch, | 3302 void RemoveBranchFromLabelLinkChain(Instruction* branch, |
| 3268 Label* label, | 3303 Label* label, |
| 3269 Instruction* label_veneer = NULL); | 3304 Instruction* label_veneer = NULL); |
| 3270 | 3305 |
| 3306 // Prevent sharing of code target constant pool entries until |
| 3307 // EndBlockCodeTargetSharing is called. Calls to this function can be nested |
| 3308 // but must be followed by an equal number of calls to |
| 3309 // EndBlockCodeTargetSharing. |
| 3310 void StartBlockCodeTargetSharing() { ++code_target_sharing_blocked_nesting_; } |
| 3311 |
| 3312 // Resume sharing of constant pool code target entries. Needs to be called |
| 3313 // as many times as StartBlockCodeTargetSharing to have an effect. |
| 3314 void EndBlockCodeTargetSharing() { --code_target_sharing_blocked_nesting_; } |
| 3315 |
| 3271 private: | 3316 private: |
| 3272 static uint32_t FPToImm8(double imm); | 3317 static uint32_t FPToImm8(double imm); |
| 3273 | 3318 |
| 3274 // Instruction helpers. | 3319 // Instruction helpers. |
| 3275 void MoveWide(const Register& rd, | 3320 void MoveWide(const Register& rd, |
| 3276 uint64_t imm, | 3321 uint64_t imm, |
| 3277 int shift, | 3322 int shift, |
| 3278 MoveWideImmediateOp mov_op); | 3323 MoveWideImmediateOp mov_op); |
| 3279 void DataProcShiftedRegister(const Register& rd, | 3324 void DataProcShiftedRegister(const Register& rd, |
| 3280 const Register& rn, | 3325 const Register& rn, |
| (...skipping 161 matching lines...) |
| 3442 // pool emission checks are interval based this value is an approximation. | 3487 // pool emission checks are interval based this value is an approximation. |
| 3443 static constexpr int kApproxMaxPoolEntryCount = 512; | 3488 static constexpr int kApproxMaxPoolEntryCount = 512; |
| 3444 | 3489 |
| 3445 // Emission of the constant pool may be blocked in some code sequences. | 3490 // Emission of the constant pool may be blocked in some code sequences. |
| 3446 int const_pool_blocked_nesting_; // Block emission if this is not zero. | 3491 int const_pool_blocked_nesting_; // Block emission if this is not zero. |
| 3447 int no_const_pool_before_; // Block emission before this pc offset. | 3492 int no_const_pool_before_; // Block emission before this pc offset. |
| 3448 | 3493 |
| 3449 // Emission of the veneer pools may be blocked in some code sequences. | 3494 // Emission of the veneer pools may be blocked in some code sequences. |
| 3450 int veneer_pool_blocked_nesting_; // Block emission if this is not zero. | 3495 int veneer_pool_blocked_nesting_; // Block emission if this is not zero. |
| 3451 | 3496 |
| 3497 // Sharing of code target entries may be blocked in some code sequences. |
| 3498 int code_target_sharing_blocked_nesting_; |
| 3499 bool IsCodeTargetSharingAllowed() const { |
| 3500 return code_target_sharing_blocked_nesting_ == 0; |
| 3501 } |
| 3502 |
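Editor's note: putting the counter and the pool together, a hedged guess at how a code-target entry might be routed once sharing can be blocked (RecordCodeTarget and the sharing_allowed parameter are assumptions layered on the ConstPoolSketch from the earlier note; in the patch the check lives on Assembler as IsCodeTargetSharingAllowed(), and the actual routing logic is not shown here):

    // Hypothetical member added to ConstPoolSketch (see the earlier note).
    bool RecordCodeTarget(uint64_t handle_address, int pc_offset,
                          bool sharing_allowed) {
      if (sharing_allowed) {
        // Shared path: reuse an existing slot for this handle if present.
        return AddSharedEntry(handle_to_index_map_, handle_address, pc_offset);
      }
      // Sharing blocked: always allocate a fresh, unshared pool slot; the
      // caller then needs to write RelocInfo for it.
      entries_.push_back(
          std::make_pair(handle_address, std::vector<int>(1, pc_offset)));
      return true;
    }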
| 3452 // Relocation info generation | 3503 // Relocation info generation |
| 3453 // Each relocation is encoded as a variable size value | 3504 // Each relocation is encoded as a variable size value |
| 3454 static constexpr int kMaxRelocSize = RelocInfoWriter::kMaxSize; | 3505 static constexpr int kMaxRelocSize = RelocInfoWriter::kMaxSize; |
| 3455 RelocInfoWriter reloc_info_writer; | 3506 RelocInfoWriter reloc_info_writer; |
| 3456 | 3507 |
| 3457 // Internal reference positions, required for (potential) patching in | 3508 // Internal reference positions, required for (potential) patching in |
| 3458 // GrowBuffer(); contains only those internal references whose labels | 3509 // GrowBuffer(); contains only those internal references whose labels |
| 3459 // are already bound. | 3510 // are already bound. |
| 3460 std::deque<int> internal_reference_positions_; | 3511 std::deque<int> internal_reference_positions_; |
| 3461 | 3512 |
| (...skipping 165 matching lines...) |
| 3627 public: | 3678 public: |
| 3628 explicit EnsureSpace(Assembler* assembler) { | 3679 explicit EnsureSpace(Assembler* assembler) { |
| 3629 assembler->CheckBufferSpace(); | 3680 assembler->CheckBufferSpace(); |
| 3630 } | 3681 } |
| 3631 }; | 3682 }; |
| 3632 | 3683 |
| 3633 } // namespace internal | 3684 } // namespace internal |
| 3634 } // namespace v8 | 3685 } // namespace v8 |
| 3635 | 3686 |
| 3636 #endif // V8_ARM64_ASSEMBLER_ARM64_H_ | 3687 #endif // V8_ARM64_ASSEMBLER_ARM64_H_ |