| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/move-optimizer.h" | 5 #include "src/compiler/move-optimizer.h" |
| 6 | 6 |
| 7 namespace v8 { | 7 namespace v8 { |
| 8 namespace internal { | 8 namespace internal { |
| 9 namespace compiler { | 9 namespace compiler { |
| 10 | 10 |
| (...skipping 10 matching lines...) |
| 21 return a.destination.CompareCanonicalized(b.destination); | 21 return a.destination.CompareCanonicalized(b.destination); |
| 22 } | 22 } |
| 23 return a.source.CompareCanonicalized(b.source); | 23 return a.source.CompareCanonicalized(b.source); |
| 24 } | 24 } |
| 25 }; | 25 }; |
| 26 | 26 |
| 27 typedef ZoneMap<MoveKey, unsigned, MoveKeyCompare> MoveMap; | 27 typedef ZoneMap<MoveKey, unsigned, MoveKeyCompare> MoveMap; |
| 28 | 28 |
| 29 class OperandSet { | 29 class OperandSet { |
| 30 public: | 30 public: |
| 31 explicit OperandSet(Zone* zone) : set_(zone), fp_reps_(0) {} | 31 explicit OperandSet(ZoneVector<InstructionOperand>* buffer) |
| 32 : set_(buffer), fp_reps_(0) { |
| 33 buffer->clear(); |
| 34 } |
| 32 | 35 |
| 33 void InsertOp(const InstructionOperand& op) { | 36 void InsertOp(const InstructionOperand& op) { |
| 34 set_.insert(op); | 37 set_->push_back(op); |
| 38 |
| 35 if (!kSimpleFPAliasing && op.IsFPRegister()) | 39 if (!kSimpleFPAliasing && op.IsFPRegister()) |
| 36 fp_reps_ |= RepBit(LocationOperand::cast(op).representation()); | 40 fp_reps_ |= RepBit(LocationOperand::cast(op).representation()); |
| 37 } | 41 } |
| 38 | 42 |
| 43 bool Contains(const InstructionOperand& op) const { |
| 44 for (const InstructionOperand& elem : *set_) { |
| 45 if (elem.EqualsCanonicalized(op)) return true; |
| 46 } |
| 47 return false; |
| 48 } |
| 49 |
| 39 bool ContainsOpOrAlias(const InstructionOperand& op) const { | 50 bool ContainsOpOrAlias(const InstructionOperand& op) const { |
| 40 if (set_.find(op) != set_.end()) return true; | 51 if (Contains(op)) return true; |
| 41 | 52 |
| 42 if (!kSimpleFPAliasing && op.IsFPRegister()) { | 53 if (!kSimpleFPAliasing && op.IsFPRegister()) { |
| 43 // Platforms where FP registers have complex aliasing need extra checks. | 54 // Platforms where FP registers have complex aliasing need extra checks. |
| 44 const LocationOperand& loc = LocationOperand::cast(op); | 55 const LocationOperand& loc = LocationOperand::cast(op); |
| 45 MachineRepresentation rep = loc.representation(); | 56 MachineRepresentation rep = loc.representation(); |
| 46 // If haven't encountered mixed rep FP registers, skip the extra checks. | 57 // If haven't encountered mixed rep FP registers, skip the extra checks. |
| 47 if (!HasMixedFPReps(fp_reps_ | RepBit(rep))) return false; | 58 if (!HasMixedFPReps(fp_reps_ | RepBit(rep))) return false; |
| 48 | 59 |
| 49 // Check register against aliasing registers of other FP representations. | 60 // Check register against aliasing registers of other FP representations. |
| 50 MachineRepresentation other_rep1, other_rep2; | 61 MachineRepresentation other_rep1, other_rep2; |
| (...skipping 13 matching lines...) |
| 64 default: | 75 default: |
| 65 UNREACHABLE(); | 76 UNREACHABLE(); |
| 66 break; | 77 break; |
| 67 } | 78 } |
| 68 const RegisterConfiguration* config = RegisterConfiguration::Turbofan(); | 79 const RegisterConfiguration* config = RegisterConfiguration::Turbofan(); |
| 69 int base = -1; | 80 int base = -1; |
| 70 int aliases = | 81 int aliases = |
| 71 config->GetAliases(rep, loc.register_code(), other_rep1, &base); | 82 config->GetAliases(rep, loc.register_code(), other_rep1, &base); |
| 72 DCHECK(aliases > 0 || (aliases == 0 && base == -1)); | 83 DCHECK(aliases > 0 || (aliases == 0 && base == -1)); |
| 73 while (aliases--) { | 84 while (aliases--) { |
| 74 if (set_.find(AllocatedOperand(LocationOperand::REGISTER, other_rep1, | 85 if (Contains(AllocatedOperand(LocationOperand::REGISTER, other_rep1, |
| 75 base + aliases)) != set_.end()) | 86 base + aliases))) { |
| 76 return true; | 87 return true; |
| 88 } |
| 77 } | 89 } |
| 78 aliases = config->GetAliases(rep, loc.register_code(), other_rep2, &base); | 90 aliases = config->GetAliases(rep, loc.register_code(), other_rep2, &base); |
| 79 DCHECK(aliases > 0 || (aliases == 0 && base == -1)); | 91 DCHECK(aliases > 0 || (aliases == 0 && base == -1)); |
| 80 while (aliases--) { | 92 while (aliases--) { |
| 81 if (set_.find(AllocatedOperand(LocationOperand::REGISTER, other_rep2, | 93 if (Contains(AllocatedOperand(LocationOperand::REGISTER, other_rep2, |
| 82 base + aliases)) != set_.end()) | 94 base + aliases))) { |
| 83 return true; | 95 return true; |
| 96 } |
| 84 } | 97 } |
| 85 } | 98 } |
| 86 return false; | 99 return false; |
| 87 } | 100 } |
| 88 | 101 |
| 89 private: | 102 private: |
| 90 static int RepBit(MachineRepresentation rep) { | 103 static int RepBit(MachineRepresentation rep) { |
| 91 return 1 << static_cast<int>(rep); | 104 return 1 << static_cast<int>(rep); |
| 92 } | 105 } |
| 93 | 106 |
| 94 static bool HasMixedFPReps(int reps) { | 107 static bool HasMixedFPReps(int reps) { |
| 95 return reps && !base::bits::IsPowerOfTwo32(reps); | 108 return reps && !base::bits::IsPowerOfTwo32(reps); |
| 96 } | 109 } |
| 97 | 110 |
| 98 ZoneSet<InstructionOperand, CompareOperandModuloType> set_; | 111 ZoneVector<InstructionOperand>* set_; |
| 99 int fp_reps_; | 112 int fp_reps_; |
| 100 }; | 113 }; |
| 101 | 114 |
| 102 int FindFirstNonEmptySlot(const Instruction* instr) { | 115 int FindFirstNonEmptySlot(const Instruction* instr) { |
| 103 int i = Instruction::FIRST_GAP_POSITION; | 116 int i = Instruction::FIRST_GAP_POSITION; |
| 104 for (; i <= Instruction::LAST_GAP_POSITION; i++) { | 117 for (; i <= Instruction::LAST_GAP_POSITION; i++) { |
| 105 ParallelMove* moves = instr->parallel_moves()[i]; | 118 ParallelMove* moves = instr->parallel_moves()[i]; |
| 106 if (moves == nullptr) continue; | 119 if (moves == nullptr) continue; |
| 107 for (MoveOperands* move : *moves) { | 120 for (MoveOperands* move : *moves) { |
| 108 if (!move->IsRedundant()) return i; | 121 if (!move->IsRedundant()) return i; |
| 109 move->Eliminate(); | 122 move->Eliminate(); |
| 110 } | 123 } |
| 111 moves->clear(); // Clear this redundant move. | 124 moves->clear(); // Clear this redundant move. |
| 112 } | 125 } |
| 113 return i; | 126 return i; |
| 114 } | 127 } |
| 115 | 128 |
| 116 } // namespace | 129 } // namespace |
| 117 | 130 |
| 118 | |
| 119 MoveOptimizer::MoveOptimizer(Zone* local_zone, InstructionSequence* code) | 131 MoveOptimizer::MoveOptimizer(Zone* local_zone, InstructionSequence* code) |
| 120 : local_zone_(local_zone), | 132 : local_zone_(local_zone), |
| 121 code_(code), | 133 code_(code), |
| 122 local_vector_(local_zone) {} | 134 local_vector_(local_zone), |
| 123 | 135 operand_buffer1(local_zone), |
| 136 operand_buffer2(local_zone) {} |
| 124 | 137 |
| 125 void MoveOptimizer::Run() { | 138 void MoveOptimizer::Run() { |
| 126 for (Instruction* instruction : code()->instructions()) { | 139 for (Instruction* instruction : code()->instructions()) { |
| 127 CompressGaps(instruction); | 140 CompressGaps(instruction); |
| 128 } | 141 } |
| 129 for (InstructionBlock* block : code()->instruction_blocks()) { | 142 for (InstructionBlock* block : code()->instruction_blocks()) { |
| 130 CompressBlock(block); | 143 CompressBlock(block); |
| 131 } | 144 } |
| 132 for (InstructionBlock* block : code()->instruction_blocks()) { | 145 for (InstructionBlock* block : code()->instruction_blocks()) { |
| 133 if (block->PredecessorCount() <= 1) continue; | 146 if (block->PredecessorCount() <= 1) continue; |
| (...skipping 19 matching lines...) |
| 153 } | 166 } |
| 154 | 167 |
| 155 void MoveOptimizer::RemoveClobberedDestinations(Instruction* instruction) { | 168 void MoveOptimizer::RemoveClobberedDestinations(Instruction* instruction) { |
| 156 if (instruction->IsCall()) return; | 169 if (instruction->IsCall()) return; |
| 157 ParallelMove* moves = instruction->parallel_moves()[0]; | 170 ParallelMove* moves = instruction->parallel_moves()[0]; |
| 158 if (moves == nullptr) return; | 171 if (moves == nullptr) return; |
| 159 | 172 |
| 160 DCHECK(instruction->parallel_moves()[1] == nullptr || | 173 DCHECK(instruction->parallel_moves()[1] == nullptr || |
| 161 instruction->parallel_moves()[1]->empty()); | 174 instruction->parallel_moves()[1]->empty()); |
| 162 | 175 |
| 163 OperandSet outputs(local_zone()); | 176 OperandSet outputs(&operand_buffer1); |
| 164 OperandSet inputs(local_zone()); | 177 OperandSet inputs(&operand_buffer2); |
| 165 | 178 |
| 166 // Outputs and temps are treated together as potentially clobbering a | 179 // Outputs and temps are treated together as potentially clobbering a |
| 167 // destination operand. | 180 // destination operand. |
| 168 for (size_t i = 0; i < instruction->OutputCount(); ++i) { | 181 for (size_t i = 0; i < instruction->OutputCount(); ++i) { |
| 169 outputs.InsertOp(*instruction->OutputAt(i)); | 182 outputs.InsertOp(*instruction->OutputAt(i)); |
| 170 } | 183 } |
| 171 for (size_t i = 0; i < instruction->TempCount(); ++i) { | 184 for (size_t i = 0; i < instruction->TempCount(); ++i) { |
| 172 outputs.InsertOp(*instruction->TempAt(i)); | 185 outputs.InsertOp(*instruction->TempAt(i)); |
| 173 } | 186 } |
| 174 | 187 |
| (...skipping 20 matching lines...) |
| 195 } | 208 } |
| 196 } | 209 } |
| 197 } | 210 } |
| 198 | 211 |
| 199 void MoveOptimizer::MigrateMoves(Instruction* to, Instruction* from) { | 212 void MoveOptimizer::MigrateMoves(Instruction* to, Instruction* from) { |
| 200 if (from->IsCall()) return; | 213 if (from->IsCall()) return; |
| 201 | 214 |
| 202 ParallelMove* from_moves = from->parallel_moves()[0]; | 215 ParallelMove* from_moves = from->parallel_moves()[0]; |
| 203 if (from_moves == nullptr || from_moves->empty()) return; | 216 if (from_moves == nullptr || from_moves->empty()) return; |
| 204 | 217 |
| 205 OperandSet dst_cant_be(local_zone()); | 218 OperandSet dst_cant_be(&operand_buffer1); |
| 206 OperandSet src_cant_be(local_zone()); | 219 OperandSet src_cant_be(&operand_buffer2); |
| 207 | 220 |
| 208 // If an operand is an input to the instruction, we cannot move assignments | 221 // If an operand is an input to the instruction, we cannot move assignments |
| 209 // where it appears on the LHS. | 222 // where it appears on the LHS. |
| 210 for (size_t i = 0; i < from->InputCount(); ++i) { | 223 for (size_t i = 0; i < from->InputCount(); ++i) { |
| 211 dst_cant_be.InsertOp(*from->InputAt(i)); | 224 dst_cant_be.InsertOp(*from->InputAt(i)); |
| 212 } | 225 } |
| 213 // If an operand is output to the instruction, we cannot move assignments | 226 // If an operand is output to the instruction, we cannot move assignments |
| 214 // where it appears on the RHS, because we would lose its value before the | 227 // where it appears on the RHS, because we would lose its value before the |
| 215 // instruction. | 228 // instruction. |
| 216 // Same for temp operands. | 229 // Same for temp operands. |
| (...skipping 193 matching lines...) |
| 410 } | 423 } |
| 411 } | 424 } |
| 412 if (move_map.empty() || correct_counts == 0) return; | 425 if (move_map.empty() || correct_counts == 0) return; |
| 413 | 426 |
| 414 // Find insertion point. | 427 // Find insertion point. |
| 415 Instruction* instr = code()->instructions()[block->first_instruction_index()]; | 428 Instruction* instr = code()->instructions()[block->first_instruction_index()]; |
| 416 | 429 |
| 417 if (correct_counts != move_map.size()) { | 430 if (correct_counts != move_map.size()) { |
| 418 // Moves that are unique to each predecessor won't be pushed to the common | 431 // Moves that are unique to each predecessor won't be pushed to the common |
| 419 // successor. | 432 // successor. |
| 420 OperandSet conflicting_srcs(local_zone()); | 433 OperandSet conflicting_srcs(&operand_buffer1); |
| 421 for (auto iter = move_map.begin(), end = move_map.end(); iter != end;) { | 434 for (auto iter = move_map.begin(), end = move_map.end(); iter != end;) { |
| 422 auto current = iter; | 435 auto current = iter; |
| 423 ++iter; | 436 ++iter; |
| 424 if (current->second != block->PredecessorCount()) { | 437 if (current->second != block->PredecessorCount()) { |
| 425 InstructionOperand dest = current->first.destination; | 438 InstructionOperand dest = current->first.destination; |
| 426 // Not all the moves in all the gaps are the same. Maybe some are. If | 439 // Not all the moves in all the gaps are the same. Maybe some are. If |
| 427 // there are such moves, we could move them, but the destination of the | 440 // there are such moves, we could move them, but the destination of the |
| 428 // moves staying behind can't appear as a source of a common move, | 441 // moves staying behind can't appear as a source of a common move, |
| 429 // because the move staying behind will clobber this destination. | 442 // because the move staying behind will clobber this destination. |
| 430 conflicting_srcs.InsertOp(dest); | 443 conflicting_srcs.InsertOp(dest); |
| (...skipping 109 matching lines...) |
| 540 static_cast<Instruction::GapPosition>(1), code_zone()); | 553 static_cast<Instruction::GapPosition>(1), code_zone()); |
| 541 slot_1->AddMove(group_begin->destination(), load->destination()); | 554 slot_1->AddMove(group_begin->destination(), load->destination()); |
| 542 load->Eliminate(); | 555 load->Eliminate(); |
| 543 } | 556 } |
| 544 loads.clear(); | 557 loads.clear(); |
| 545 } | 558 } |
| 546 | 559 |
| 547 } // namespace compiler | 560 } // namespace compiler |
| 548 } // namespace internal | 561 } // namespace internal |
| 549 } // namespace v8 | 562 } // namespace v8 |
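
Note on the OperandSet change above: the new version borrows a caller-owned ZoneVector scratch buffer and does linear Contains() scans instead of allocating a ZoneSet per pass. The following standalone sketch illustrates that buffer-reuse pattern only; Operand and ScratchSet are simplified stand-ins invented for this example, not the real V8 classes, and the small-set/linear-scan trade-off is an assumption based on the per-instruction operand counts seen in this file.

    // Minimal sketch, assuming small per-pass sets where a linear scan over a
    // reusable scratch vector beats rebuilding a tree-based set each time.
    #include <cassert>
    #include <vector>

    struct Operand {
      int canonical_id;  // stand-in for EqualsCanonicalized()
      bool operator==(const Operand& other) const {
        return canonical_id == other.canonical_id;
      }
    };

    class ScratchSet {
     public:
      // Borrow the caller's buffer and start empty, mirroring
      // OperandSet(ZoneVector<InstructionOperand>* buffer) in the diff.
      explicit ScratchSet(std::vector<Operand>* buffer) : set_(buffer) {
        buffer->clear();
      }

      void Insert(const Operand& op) { set_->push_back(op); }

      // Linear scan; cheap for the handful of operands per instruction.
      bool Contains(const Operand& op) const {
        for (const Operand& elem : *set_) {
          if (elem == op) return true;
        }
        return false;
      }

     private:
      std::vector<Operand>* set_;  // not owned; capacity persists across passes
    };

    int main() {
      std::vector<Operand> buffer1, buffer2;  // long-lived scratch storage
      for (int pass = 0; pass < 3; ++pass) {
        ScratchSet outputs(&buffer1);  // reuses buffer1's capacity each pass
        ScratchSet inputs(&buffer2);
        outputs.Insert({1});
        inputs.Insert({2});
        assert(outputs.Contains({1}) && !outputs.Contains({2}));
      }
      return 0;
    }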