OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/bit-vector.h" | 5 #include "src/bit-vector.h" |
6 #include "src/compiler/instruction.h" | 6 #include "src/compiler/instruction.h" |
7 #include "src/compiler/register-allocator-verifier.h" | 7 #include "src/compiler/register-allocator-verifier.h" |
8 | 8 |
9 namespace v8 { | 9 namespace v8 { |
10 namespace internal { | 10 namespace internal { |
(...skipping 26 matching lines...) |
37 for (const MoveOperands* move : *moves) { | 37 for (const MoveOperands* move : *moves) { |
38 if (move->IsRedundant()) continue; | 38 if (move->IsRedundant()) continue; |
39 CHECK(move->source().IsAllocated() || move->source().IsConstant()); | 39 CHECK(move->source().IsAllocated() || move->source().IsConstant()); |
40 CHECK(move->destination().IsAllocated()); | 40 CHECK(move->destination().IsAllocated()); |
41 } | 41 } |
42 } | 42 } |
43 } | 43 } |
44 | 44 |
45 } // namespace | 45 } // namespace |
46 | 46 |
47 | |
48 void RegisterAllocatorVerifier::VerifyInput( | |
49 const OperandConstraint& constraint) { | |
50 CHECK_NE(kSameAsFirst, constraint.type_); | |
51 if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) { | |
52 CHECK_NE(InstructionOperand::kInvalidVirtualRegister, | |
53 constraint.virtual_register_); | |
54 } | |
55 } | |
56 | |
57 | |
58 void RegisterAllocatorVerifier::VerifyTemp( | |
59 const OperandConstraint& constraint) { | |
60 CHECK_NE(kSameAsFirst, constraint.type_); | |
61 CHECK_NE(kImmediate, constraint.type_); | |
62 CHECK_NE(kExplicit, constraint.type_); | |
63 CHECK_NE(kConstant, constraint.type_); | |
64 } | |
65 | |
66 | |
67 void RegisterAllocatorVerifier::VerifyOutput( | |
68 const OperandConstraint& constraint) { | |
69 CHECK_NE(kImmediate, constraint.type_); | |
70 CHECK_NE(kExplicit, constraint.type_); | |
71 CHECK_NE(InstructionOperand::kInvalidVirtualRegister, | |
72 constraint.virtual_register_); | |
73 } | |
74 | |
75 | |
76 RegisterAllocatorVerifier::RegisterAllocatorVerifier( | 47 RegisterAllocatorVerifier::RegisterAllocatorVerifier( |
77 Zone* zone, const RegisterConfiguration* config, | 48 Zone* zone, const RegisterConfiguration* config, |
78 const InstructionSequence* sequence) | 49 const InstructionSequence* sequence) |
79 : zone_(zone), config_(config), sequence_(sequence), constraints_(zone) { | 50 : zone_(zone), |
| 51 config_(config), |
| 52 sequence_(sequence), |
| 53 constraints_(zone), |
| 54 assessments_(zone), |
| 55 outstanding_assessments_(zone), |
| 56 worklist_(zone), |
| 57 seen_(zone) { |
80 constraints_.reserve(sequence->instructions().size()); | 58 constraints_.reserve(sequence->instructions().size()); |
81 // TODO(dcarney): model unique constraints. | 59 // TODO(dcarney): model unique constraints. |
82 // Construct OperandConstraints for all InstructionOperands, eliminating | 60 // Construct OperandConstraints for all InstructionOperands, eliminating |
83 // kSameAsFirst along the way. | 61 // kSameAsFirst along the way. |
84 for (const Instruction* instr : sequence->instructions()) { | 62 for (const Instruction* instr : sequence->instructions()) { |
85 // All gaps should be totally unallocated at this point. | 63 // All gaps should be totally unallocated at this point. |
86 VerifyEmptyGaps(instr); | 64 VerifyEmptyGaps(instr); |
87 const size_t operand_count = OperandCount(instr); | 65 const size_t operand_count = OperandCount(instr); |
88 OperandConstraint* op_constraints = | 66 OperandConstraint* op_constraints = |
89 zone->NewArray<OperandConstraint>(operand_count); | 67 zone->NewArray<OperandConstraint>(operand_count); |
(...skipping 14 matching lines...) Expand all Loading... |
104 op_constraints[count].value_ = op_constraints[0].value_; | 82 op_constraints[count].value_ = op_constraints[0].value_; |
105 } | 83 } |
106 VerifyOutput(op_constraints[count]); | 84 VerifyOutput(op_constraints[count]); |
107 } | 85 } |
108 InstructionConstraint instr_constraint = {instr, operand_count, | 86 InstructionConstraint instr_constraint = {instr, operand_count, |
109 op_constraints}; | 87 op_constraints}; |
110 constraints()->push_back(instr_constraint); | 88 constraints()->push_back(instr_constraint); |
111 } | 89 } |
112 } | 90 } |
113 | 91 |
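
A minimal standalone sketch of the kSameAsFirst elimination performed in the constructor above (simplified types invented for illustration, not the declarations from register-allocator-verifier.h): an output constrained to SAME_AS_FIRST_INPUT inherits the placement constraint of input 0 while keeping its own virtual register.

    #include <cassert>
    #include <cstddef>

    // Simplified stand-ins for the verifier's constraint bookkeeping.
    enum ConstraintType { kFixedRegister, kSlot, kSameAsFirst };

    struct OperandConstraint {
      ConstraintType type_;
      int value_;  // e.g. a fixed register code
      int virtual_register_;
    };

    // Mirrors the constructor above: an output constrained as "same as
    // first input" takes over input 0's placement constraint, so later
    // checks never see kSameAsFirst.
    void EliminateSameAsFirst(OperandConstraint* ops, size_t output_index) {
      if (ops[output_index].type_ == kSameAsFirst) {
        ops[output_index].type_ = ops[0].type_;
        ops[output_index].value_ = ops[0].value_;
      }
    }

    int main() {
      OperandConstraint ops[2] = {{kFixedRegister, 7, 10},  // input 0
                                  {kSameAsFirst, -1, 11}};  // the output
      EliminateSameAsFirst(ops, 1);
      assert(ops[1].type_ == kFixedRegister && ops[1].value_ == 7);
      assert(ops[1].virtual_register_ == 11);  // keeps its own vreg
      return 0;
    }
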
| 92 void RegisterAllocatorVerifier::VerifyInput( |
| 93 const OperandConstraint& constraint) { |
| 94 CHECK_NE(kSameAsFirst, constraint.type_); |
| 95 if (constraint.type_ != kImmediate && constraint.type_ != kExplicit) { |
| 96 CHECK_NE(InstructionOperand::kInvalidVirtualRegister, |
| 97 constraint.virtual_register_); |
| 98 } |
| 99 } |
| 100 |
| 101 void RegisterAllocatorVerifier::VerifyTemp( |
| 102 const OperandConstraint& constraint) { |
| 103 CHECK_NE(kSameAsFirst, constraint.type_); |
| 104 CHECK_NE(kImmediate, constraint.type_); |
| 105 CHECK_NE(kExplicit, constraint.type_); |
| 106 CHECK_NE(kConstant, constraint.type_); |
| 107 } |
| 108 |
| 109 void RegisterAllocatorVerifier::VerifyOutput( |
| 110 const OperandConstraint& constraint) { |
| 111 CHECK_NE(kImmediate, constraint.type_); |
| 112 CHECK_NE(kExplicit, constraint.type_); |
| 113 CHECK_NE(InstructionOperand::kInvalidVirtualRegister, |
| 114 constraint.virtual_register_); |
| 115 } |
114 | 116 |
115 void RegisterAllocatorVerifier::VerifyAssignment() { | 117 void RegisterAllocatorVerifier::VerifyAssignment() { |
116 CHECK(sequence()->instructions().size() == constraints()->size()); | 118 CHECK(sequence()->instructions().size() == constraints()->size()); |
117 auto instr_it = sequence()->begin(); | 119 auto instr_it = sequence()->begin(); |
118 for (const auto& instr_constraint : *constraints()) { | 120 for (const auto& instr_constraint : *constraints()) { |
119 const Instruction* instr = instr_constraint.instruction_; | 121 const Instruction* instr = instr_constraint.instruction_; |
120 // All gaps should be totally allocated at this point. | 122 // All gaps should be totally allocated at this point. |
121 VerifyAllocatedGaps(instr); | 123 VerifyAllocatedGaps(instr); |
122 const size_t operand_count = instr_constraint.operand_constaints_size_; | 124 const size_t operand_count = instr_constraint.operand_constaints_size_; |
123 const OperandConstraint* op_constraints = | 125 const OperandConstraint* op_constraints = |
124 instr_constraint.operand_constraints_; | 126 instr_constraint.operand_constraints_; |
125 CHECK_EQ(instr, *instr_it); | 127 CHECK_EQ(instr, *instr_it); |
126 CHECK(operand_count == OperandCount(instr)); | 128 CHECK(operand_count == OperandCount(instr)); |
127 size_t count = 0; | 129 size_t count = 0; |
128 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { | 130 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { |
129 CheckConstraint(instr->InputAt(i), &op_constraints[count]); | 131 CheckConstraint(instr->InputAt(i), &op_constraints[count]); |
130 } | 132 } |
131 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { | 133 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { |
132 CheckConstraint(instr->TempAt(i), &op_constraints[count]); | 134 CheckConstraint(instr->TempAt(i), &op_constraints[count]); |
133 } | 135 } |
134 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { | 136 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { |
135 CheckConstraint(instr->OutputAt(i), &op_constraints[count]); | 137 CheckConstraint(instr->OutputAt(i), &op_constraints[count]); |
136 } | 138 } |
137 ++instr_it; | 139 ++instr_it; |
138 } | 140 } |
139 } | 141 } |
140 | 142 |
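
CheckConstraint, called above and mostly elided further down, reduces to a switch on the constraint kind recorded before allocation. A rough standalone illustration with invented minimal types (not the V8 enum or operand classes):

    #include <cassert>

    enum ConstraintType { kFixedRegister, kSlot };

    struct AllocatedOp {
      bool is_register;
      int index;  // register code, or spill-slot index
    };

    // The shape of the per-operand check: each pre-allocation constraint
    // dictates what the post-allocation operand must look like.
    void CheckOperand(const AllocatedOp& op, ConstraintType type, int value) {
      switch (type) {
        case kFixedRegister:
          assert(op.is_register && op.index == value);
          break;
        case kSlot:
          assert(!op.is_register);
          break;
      }
    }

    int main() {
      CheckOperand({true, 3}, kFixedRegister, 3);  // landed in the fixed reg
      CheckOperand({false, 8}, kSlot, -1);         // spilled to a stack slot
      return 0;
    }
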
141 | |
142 void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op, | 143 void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op, |
143 OperandConstraint* constraint) { | 144 OperandConstraint* constraint) { |
144 constraint->value_ = kMinInt; | 145 constraint->value_ = kMinInt; |
145 constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister; | 146 constraint->virtual_register_ = InstructionOperand::kInvalidVirtualRegister; |
146 if (op->IsConstant()) { | 147 if (op->IsConstant()) { |
147 constraint->type_ = kConstant; | 148 constraint->type_ = kConstant; |
148 constraint->value_ = ConstantOperand::cast(op)->virtual_register(); | 149 constraint->value_ = ConstantOperand::cast(op)->virtual_register(); |
149 constraint->virtual_register_ = constraint->value_; | 150 constraint->virtual_register_ = constraint->value_; |
150 } else if (op->IsExplicit()) { | 151 } else if (op->IsExplicit()) { |
151 constraint->type_ = kExplicit; | 152 constraint->type_ = kExplicit; |
(...skipping 45 matching lines...) |
197 constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot; | 198 constraint->type_ = sequence()->IsFloat(vreg) ? kDoubleSlot : kSlot; |
198 break; | 199 break; |
199 case UnallocatedOperand::SAME_AS_FIRST_INPUT: | 200 case UnallocatedOperand::SAME_AS_FIRST_INPUT: |
200 constraint->type_ = kSameAsFirst; | 201 constraint->type_ = kSameAsFirst; |
201 break; | 202 break; |
202 } | 203 } |
203 } | 204 } |
204 } | 205 } |
205 } | 206 } |
206 | 207 |
207 | |
208 void RegisterAllocatorVerifier::CheckConstraint( | 208 void RegisterAllocatorVerifier::CheckConstraint( |
209 const InstructionOperand* op, const OperandConstraint* constraint) { | 209 const InstructionOperand* op, const OperandConstraint* constraint) { |
210 switch (constraint->type_) { | 210 switch (constraint->type_) { |
211 case kConstant: | 211 case kConstant: |
212 CHECK(op->IsConstant()); | 212 CHECK(op->IsConstant()); |
213 CHECK_EQ(ConstantOperand::cast(op)->virtual_register(), | 213 CHECK_EQ(ConstantOperand::cast(op)->virtual_register(), |
214 constraint->value_); | 214 constraint->value_); |
215 return; | 215 return; |
216 case kImmediate: { | 216 case kImmediate: { |
217 CHECK(op->IsImmediate()); | 217 CHECK(op->IsImmediate()); |
(...skipping 39 matching lines...) |
257 return; | 257 return; |
258 case kNoneDouble: | 258 case kNoneDouble: |
259 CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot()); | 259 CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot()); |
260 return; | 260 return; |
261 case kSameAsFirst: | 261 case kSameAsFirst: |
262 CHECK(false); | 262 CHECK(false); |
263 return; | 263 return; |
264 } | 264 } |
265 } | 265 } |
266 | 266 |
267 namespace { | 267 void BlockAssessments::PerformMoves(const Instruction* instruction) { |
268 | 268 const ParallelMove* first = |
269 typedef RpoNumber Rpo; | 269 instruction->GetParallelMove(Instruction::GapPosition::START); |
270 | 270 PerformParallelMoves(first); |
271 static const int kInvalidVreg = InstructionOperand::kInvalidVirtualRegister; | 271 const ParallelMove* last = |
272 | 272 instruction->GetParallelMove(Instruction::GapPosition::END); |
273 struct PhiData : public ZoneObject { | 273 PerformParallelMoves(last); |
274 PhiData(Rpo definition_rpo, const PhiInstruction* phi, int first_pred_vreg, | 274 } |
275 const PhiData* first_pred_phi, Zone* zone) | 275 |
276 : definition_rpo(definition_rpo), | 276 void BlockAssessments::PerformParallelMoves(const ParallelMove* moves) { |
277 virtual_register(phi->virtual_register()), | 277 if (moves == nullptr) return; |
278 first_pred_vreg(first_pred_vreg), | 278 |
279 first_pred_phi(first_pred_phi), | 279 CHECK(map_for_moves_.empty()); |
280 operands(zone) { | 280 for (MoveOperands* move : *moves) { |
281 operands.reserve(phi->operands().size()); | 281 if (move->IsEliminated() || move->IsRedundant()) continue; |
282 operands.insert(operands.begin(), phi->operands().begin(), | 282 auto it = map_.find(move->source()); |
283 phi->operands().end()); | 283 // The RHS of a parallel move should have been already assessed. |
284 } | 284 CHECK(it != map_.end()); |
285 const Rpo definition_rpo; | 285 // The LHS of a parallel move should not have been assigned in this |
286 const int virtual_register; | 286 // parallel move. |
287 const int first_pred_vreg; | 287 CHECK(map_for_moves_.find(move->destination()) == map_for_moves_.end()); |
288 const PhiData* first_pred_phi; | 288 // Copy the assessment to the destination. |
289 IntVector operands; | 289 map_for_moves_[move->destination()] = it->second; |
290 }; | 290 } |
291 | 291 for (auto pair : map_for_moves_) { |
292 class PhiMap : public ZoneMap<int, PhiData*>, public ZoneObject { | 292 map_[pair.first] = pair.second; |
293 public: | 293 } |
294 explicit PhiMap(Zone* zone) : ZoneMap<int, PhiData*>(zone) {} | 294 map_for_moves_.clear(); |
295 }; | 295 } |
296 | 296 |
297 struct OperandLess { | 297 void BlockAssessments::DropRegisters() { |
298 bool operator()(const InstructionOperand* a, | 298 for (auto iterator = map().begin(), end = map().end(); iterator != end;) { |
299 const InstructionOperand* b) const { | 299 auto current = iterator; |
300 return a->CompareCanonicalized(*b); | 300 ++iterator; |
301 } | 301 InstructionOperand op = current->first; |
302 }; | 302 if (op.IsAnyRegister()) map().erase(current); |
303 | 303 } |
304 class OperandMap : public ZoneObject { | 304 } |
305 public: | 305 |
306 struct MapValue : public ZoneObject { | 306 BlockAssessments* RegisterAllocatorVerifier::CreateForBlock( |
307 MapValue() | 307 const InstructionBlock* block) { |
308 : incoming(nullptr), | 308 RpoNumber current_block_id = block->rpo_number(); |
309 define_vreg(kInvalidVreg), | 309 |
310 use_vreg(kInvalidVreg), | 310 BlockAssessments* ret = new (zone()) BlockAssessments(zone()); |
311 succ_vreg(kInvalidVreg) {} | 311 if (block->PredecessorCount() == 0) { |
312 MapValue* incoming; // value from first predecessor block. | 312 // TODO(mtrofin): the following check should hold; however, in certain |
313 int define_vreg; // valid if this value was defined in this block. | 313 // unit tests it is invalidated by the last block. Investigate and |
314 int use_vreg; // valid if this value was used in this block. | 314 // normalize the CFG. |
315 int succ_vreg; // valid if propagated back from successor block. | 315 // CHECK(current_block_id.ToInt() == 0); |
316 }; | 316 // The phi size test below is because we can, technically, have phi |
317 | 317 // instructions with one argument. Some tests expose that, too. |
318 class Map | 318 } else if (block->PredecessorCount() == 1 && block->phis().size() == 0) { |
319 : public ZoneMap<const InstructionOperand*, MapValue*, OperandLess> { | 319 const BlockAssessments* prev_block = assessments_[block->predecessors()[0]]; |
320 public: | 320 ret->CopyFrom(prev_block); |
321 explicit Map(Zone* zone) | 321 } else { |
322 : ZoneMap<const InstructionOperand*, MapValue*, OperandLess>(zone) {} | 322 for (RpoNumber pred_id : block->predecessors()) { |
323 | 323 // For every operand coming from any of the predecessors, create an |
324 // Remove all entries with keys not in other. | 324 // unfinalized assessment. |
325 void Intersect(const Map& other) { | 325 auto iterator = assessments_.find(pred_id); |
326 if (this->empty()) return; | 326 if (iterator == assessments_.end()) { |
327 auto it = this->begin(); | 327 // This block is the head of a loop, and this predecessor is |
328 OperandLess less; | 328 // the loopback arc. |
329 for (const std::pair<const InstructionOperand*, MapValue*>& o : other) { | 329 // Validate this is a loop case, otherwise the CFG is |
330 while (less(it->first, o.first)) { | 330 // malformed. |
331 this->erase(it++); | 331 CHECK(pred_id >= current_block_id); |
332 if (it == this->end()) return; | 332 CHECK(block->IsLoopHeader()); |
| 333 continue; |
| 334 } |
| 335 const BlockAssessments* pred_assessments = iterator->second; |
| 336 CHECK_NOT_NULL(pred_assessments); |
| 337 for (auto pair : pred_assessments->map()) { |
| 338 InstructionOperand operand = pair.first; |
| 339 if (ret->map().find(operand) == ret->map().end()) { |
| 340 ret->map().insert(std::make_pair( |
| 341 operand, new (zone()) PendingAssessment(block, operand))); |
333 } | 342 } |
334 if (it->first->EqualsCanonicalized(*o.first)) { | 343 } |
335 ++it; | 344 } |
336 if (it == this->end()) return; | 345 } |
| 346 return ret; |
| 347 } |
| 348 |
| 349 void RegisterAllocatorVerifier::ValidatePendingAssessment( |
| 350 RpoNumber block_id, InstructionOperand op, PendingAssessment* assessment, |
| 351 int virtual_register) { |
| 352 // When validating a pending assessment, it is possible that some |
| 353 // of the assessments for the original operand (the one for which |
| 354 // the assessment was first created) are also pending. To avoid |
| 355 // recursion, we use a work list. To deal with cycles, we keep a |
| 356 // set of seen nodes. |
| 357 CHECK(worklist_.empty()); |
| 358 CHECK(seen_.empty()); |
| 359 worklist_.push(std::make_pair(assessment, virtual_register)); |
| 360 seen_.insert(block_id); |
| 361 |
| 362 while (!worklist_.empty()) { |
| 363 auto work = worklist_.front(); |
| 364 PendingAssessment* current_assessment = work.first; |
| 365 int current_virtual_register = work.second; |
| 366 InstructionOperand current_operand = current_assessment->operand(); |
| 367 worklist_.pop(); |
| 368 |
| 369 const InstructionBlock* origin = current_assessment->origin(); |
| 370 CHECK(origin->PredecessorCount() > 1 || origin->phis().size() > 0); |
| 371 |
| 372 // Check if the virtual register is a phi first, instead of relying on |
| 373 // the incoming assessments. In particular, this handles the case |
| 374 // v1 = phi v0 v0, which structurally is identical to v0 having been |
| 375 // defined at the top of a diamond, and arriving at the node joining the |
| 376 // diamond's branches. |
| 377 const PhiInstruction* phi = nullptr; |
| 378 for (const PhiInstruction* candidate : origin->phis()) { |
| 379 if (candidate->virtual_register() == current_virtual_register) { |
| 380 phi = candidate; |
| 381 break; |
| 382 } |
| 383 } |
| 384 |
| 385 int op_index = 0; |
| 386 for (RpoNumber pred : origin->predecessors()) { |
| 387 int expected = |
| 388 phi != nullptr ? phi->operands()[op_index] : current_virtual_register; |
| 389 |
| 390 ++op_index; |
| 391 auto pred_assignment = assessments_.find(pred); |
| 392 if (pred_assignment == assessments_.end()) { |
| 393 CHECK(origin->IsLoopHeader()); |
| 394 auto todo_iter = outstanding_assessments_.find(pred); |
| 395 DelayedAssessments* set = nullptr; |
| 396 if (todo_iter == outstanding_assessments_.end()) { |
| 397 set = new (zone()) DelayedAssessments(zone()); |
| 398 outstanding_assessments_.insert(std::make_pair(pred, set)); |
337 } else { | 399 } else { |
338 CHECK(less(o.first, it->first)); | 400 set = todo_iter->second; |
339 } | 401 } |
340 } | 402 set->AddDelayedAssessment(current_operand, expected); |
341 } | |
342 }; | |
343 | |
344 explicit OperandMap(Zone* zone) : map_(zone) {} | |
345 | |
346 Map& map() { return map_; } | |
347 | |
348 void RunParallelMoves(Zone* zone, const ParallelMove* moves) { | |
349 // Compute outgoing mappings. | |
350 Map to_insert(zone); | |
351 for (const MoveOperands* move : *moves) { | |
352 if (move->IsEliminated()) continue; | |
353 auto cur = map().find(&move->source()); | |
354 CHECK(cur != map().end()); | |
355 auto res = | |
356 to_insert.insert(std::make_pair(&move->destination(), cur->second)); | |
357 // Ensure injectivity of moves. | |
358 CHECK(res.second); | |
359 } | |
360 // Drop current mappings. | |
361 for (const MoveOperands* move : *moves) { | |
362 if (move->IsEliminated()) continue; | |
363 auto cur = map().find(&move->destination()); | |
364 if (cur != map().end()) map().erase(cur); | |
365 } | |
366 // Insert new values. | |
367 map().insert(to_insert.begin(), to_insert.end()); | |
368 } | |
369 | |
370 void RunGaps(Zone* zone, const Instruction* instr) { | |
371 for (int i = Instruction::FIRST_GAP_POSITION; | |
372 i <= Instruction::LAST_GAP_POSITION; i++) { | |
373 Instruction::GapPosition inner_pos = | |
374 static_cast<Instruction::GapPosition>(i); | |
375 const ParallelMove* move = instr->GetParallelMove(inner_pos); | |
376 if (move == nullptr) continue; | |
377 RunParallelMoves(zone, move); | |
378 } | |
379 } | |
380 | |
381 void Drop(const InstructionOperand* op) { | |
382 auto it = map().find(op); | |
383 if (it != map().end()) map().erase(it); | |
384 } | |
385 | |
386 void DropRegisters(const RegisterConfiguration* config) { | |
387 // TODO(dcarney): sort map by kind and drop range. | |
388 for (auto it = map().begin(); it != map().end();) { | |
389 const InstructionOperand* op = it->first; | |
390 if (op->IsRegister() || op->IsDoubleRegister()) { | |
391 map().erase(it++); | |
392 } else { | |
393 ++it; | |
394 } | |
395 } | |
396 } | |
397 | |
398 MapValue* Define(Zone* zone, const InstructionOperand* op, | |
399 int virtual_register) { | |
400 MapValue* value = new (zone) MapValue(); | |
401 value->define_vreg = virtual_register; | |
402 auto res = map().insert(std::make_pair(op, value)); | |
403 if (!res.second) res.first->second = value; | |
404 return value; | |
405 } | |
406 | |
407 void Use(const InstructionOperand* op, int use_vreg, bool initial_pass) { | |
408 auto it = map().find(op); | |
409 CHECK(it != map().end()); | |
410 MapValue* v = it->second; | |
411 if (v->define_vreg != kInvalidVreg) { | |
412 CHECK_EQ(v->define_vreg, use_vreg); | |
413 } | |
414 // Already used this vreg in this block. | |
415 if (v->use_vreg != kInvalidVreg) { | |
416 CHECK_EQ(v->use_vreg, use_vreg); | |
417 return; | |
418 } | |
419 if (!initial_pass) { | |
420 // A value may be defined and used in this block or the use must have | |
421 // propagated up. | |
422 if (v->succ_vreg != kInvalidVreg) { | |
423 CHECK_EQ(v->succ_vreg, use_vreg); | |
424 } else { | |
425 CHECK_EQ(v->define_vreg, use_vreg); | |
426 } | |
427 // Mark the use. | |
428 it->second->use_vreg = use_vreg; | |
429 return; | |
430 } | |
431 // Go up block list and ensure the correct definition is reached. | |
432 for (; v != nullptr; v = v->incoming) { | |
433 // Value unused in block. | |
434 if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) { | |
435 continue; | 403 continue; |
436 } | 404 } |
437 // Found correct definition or use. | 405 |
438 CHECK(v->define_vreg == use_vreg || v->use_vreg == use_vreg); | 406 const BlockAssessments* pred_assessments = pred_assignment->second; |
439 // Mark the use. | 407 auto found_contribution = pred_assessments->map().find(current_operand); |
440 it->second->use_vreg = use_vreg; | 408 CHECK(found_contribution != pred_assessments->map().end()); |
441 return; | 409 Assessment* contribution = found_contribution->second; |
442 } | 410 |
443 // Use of a non-phi value without definition. | 411 switch (contribution->kind()) { |
444 CHECK(false); | 412 case Final: |
445 } | 413 CHECK_EQ(FinalAssessment::cast(contribution)->virtual_register(), |
446 | 414 expected); |
447 void UsePhi(const InstructionOperand* op, const PhiData* phi, | 415 break; |
448 bool initial_pass) { | 416 case Pending: { |
449 auto it = map().find(op); | 417 // This happens if we have a diamond feeding into another one, with |
450 CHECK(it != map().end()); | 418 // the inner one never used - other than for carrying the value. |
451 MapValue* v = it->second; | 419 PendingAssessment* next = PendingAssessment::cast(contribution); |
452 int use_vreg = phi->virtual_register; | 420 if (seen_.find(pred) == seen_.end()) { |
453 // Phis are not defined. | 421 worklist_.push({next, expected}); |
454 CHECK_EQ(kInvalidVreg, v->define_vreg); | 422 seen_.insert(pred); |
455 // Already used this vreg in this block. | 423 } |
456 if (v->use_vreg != kInvalidVreg) { | 424 // Note that we do not want to finalize pending |
457 CHECK_EQ(v->use_vreg, use_vreg); | 425 // assessments at the beginning of a block - which |
458 return; | 426 // is the information we'd have available here. This |
459 } | 427 // is because this operand may be reused to define |
460 if (!initial_pass) { | 428 // duplicate phis. |
461 // A used phi must have propagated its use to a predecessor. | 429 break; |
462 CHECK_EQ(v->succ_vreg, use_vreg); | |
463 // Mark the use. | |
464 v->use_vreg = use_vreg; | |
465 return; | |
466 } | |
467 // Go up the block list starting at the first predecessor and ensure this | |
468 // phi has a correct use or definition. | |
469 for (v = v->incoming; v != nullptr; v = v->incoming) { | |
470 // Value unused in block. | |
471 if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) { | |
472 continue; | |
473 } | |
474 // Found correct definition or use. | |
475 if (v->define_vreg != kInvalidVreg) { | |
476 CHECK(v->define_vreg == phi->first_pred_vreg); | |
477 } else if (v->use_vreg != phi->first_pred_vreg) { | |
478 // Walk the phi chain, hunting for a matching phi use. | |
479 const PhiData* p = phi; | |
480 for (; p != nullptr; p = p->first_pred_phi) { | |
481 if (p->virtual_register == v->use_vreg) break; | |
482 } | 430 } |
483 CHECK(p); | 431 } |
484 } | 432 } |
485 // Mark the use. | 433 } |
486 it->second->use_vreg = use_vreg; | 434 seen_.clear(); |
487 return; | 435 CHECK(worklist_.empty()); |
488 } | 436 } |
489 // Use of a phi value without definition. | 437 |
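
The worklist-plus-seen-set discipline used by ValidatePendingAssessment is the standard way to walk a possibly cyclic graph iteratively. A self-contained sketch with generic nodes (not the assessment types) showing that each node is processed exactly once even across a cycle:

    #include <cassert>
    #include <queue>
    #include <set>
    #include <vector>

    struct Node {
      int id;
      std::vector<const Node*> deps;  // further pending work
    };

    // Visits every node reachable from `start` exactly once, even if the
    // dependency graph contains cycles.
    int CountReachable(const Node* start) {
      std::queue<const Node*> worklist;
      std::set<int> seen;
      worklist.push(start);
      seen.insert(start->id);
      int visited = 0;
      while (!worklist.empty()) {
        const Node* current = worklist.front();
        worklist.pop();
        ++visited;
        for (const Node* dep : current->deps) {
          // Only enqueue nodes not yet seen; this is what breaks cycles.
          if (seen.insert(dep->id).second) worklist.push(dep);
        }
      }
      return visited;
    }

    int main() {
      Node a{0, {}}, b{1, {}};
      a.deps = {&b};
      b.deps = {&a};  // a cycle, as in a loop CFG
      assert(CountReachable(&a) == 2);
      return 0;
    }
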
490 UNREACHABLE(); | 438 void RegisterAllocatorVerifier::ValidateUse( |
491 } | 439 RpoNumber block_id, BlockAssessments* current_assessments, |
492 | 440 InstructionOperand op, int virtual_register) { |
493 private: | 441 auto iterator = current_assessments->map().find(op); |
494 Map map_; | 442 // We should have seen this operand before. |
495 DISALLOW_COPY_AND_ASSIGN(OperandMap); | 443 CHECK(iterator != current_assessments->map().end()); |
496 }; | 444 Assessment* assessment = iterator->second; |
497 | 445 |
498 } // namespace | 446 switch (assessment->kind()) { |
499 | 447 case Final: |
500 | 448 // The virtual registers should match. |
501 class RegisterAllocatorVerifier::BlockMaps { | 449 CHECK_EQ(FinalAssessment::cast(assessment)->virtual_register(), |
502 public: | 450 virtual_register); |
503 BlockMaps(Zone* zone, const InstructionSequence* sequence) | 451 break; |
504 : zone_(zone), | 452 case Pending: { |
505 sequence_(sequence), | 453 ValidatePendingAssessment( |
506 phi_map_guard_(sequence->VirtualRegisterCount(), zone), | 454 block_id, op, PendingAssessment::cast(assessment), virtual_register); |
507 phi_map_(zone), | 455 // If everything checks out, we may finalize the assessment. |
508 incoming_maps_(zone), | 456 current_assessments->map()[op] = |
509 outgoing_maps_(zone) { | 457 new (zone()) FinalAssessment(virtual_register); |
510 InitializePhis(); | 458 break; |
511 InitializeOperandMaps(); | 459 } |
512 } | 460 } |
513 | 461 } |
514 bool IsPhi(int virtual_register) { | 462 |
515 return phi_map_guard_.Contains(virtual_register); | 463 void RegisterAllocatorVerifier::VerifyGapMoves() { |
516 } | 464 CHECK(assessments_.empty()); |
517 | 465 CHECK(outstanding_assessments_.empty()); |
518 const PhiData* GetPhi(int virtual_register) { | 466 const size_t block_count = sequence()->instruction_blocks().size(); |
519 auto it = phi_map_.find(virtual_register); | 467 for (size_t block_index = 0; block_index < block_count; ++block_index) { |
520 CHECK(it != phi_map_.end()); | |
521 return it->second; | |
522 } | |
523 | |
524 OperandMap* InitializeIncoming(size_t block_index, bool initial_pass) { | |
525 return initial_pass ? InitializeFromFirstPredecessor(block_index) | |
526 : InitializeFromIntersection(block_index); | |
527 } | |
528 | |
529 void PropagateUsesBackwards() { | |
530 typedef std::set<size_t, std::greater<size_t>, zone_allocator<size_t>> | |
531 BlockIds; | |
532 BlockIds block_ids((BlockIds::key_compare()), | |
533 zone_allocator<size_t>(zone())); | |
534 // First ensure that incoming contains only keys in all predecessors. | |
535 for (const InstructionBlock* block : sequence()->instruction_blocks()) { | |
536 size_t index = block->rpo_number().ToSize(); | |
537 block_ids.insert(index); | |
538 OperandMap::Map& succ_map = incoming_maps_[index]->map(); | |
539 for (size_t i = 0; i < block->PredecessorCount(); ++i) { | |
540 RpoNumber pred_rpo = block->predecessors()[i]; | |
541 succ_map.Intersect(outgoing_maps_[pred_rpo.ToSize()]->map()); | |
542 } | |
543 } | |
544 // Back propagation fixpoint. | |
545 while (!block_ids.empty()) { | |
546 // Pop highest block_id. | |
547 auto block_id_it = block_ids.begin(); | |
548 const size_t succ_index = *block_id_it; | |
549 block_ids.erase(block_id_it); | |
550 // Propagate uses back to their definition blocks using succ_vreg. | |
551 const InstructionBlock* block = | |
552 sequence()->instruction_blocks()[succ_index]; | |
553 OperandMap::Map& succ_map = incoming_maps_[succ_index]->map(); | |
554 for (size_t i = 0; i < block->PredecessorCount(); ++i) { | |
555 for (auto& succ_val : succ_map) { | |
556 // An incoming map contains no defines. | |
557 CHECK_EQ(kInvalidVreg, succ_val.second->define_vreg); | |
558 // Compute succ_vreg. | |
559 int succ_vreg = succ_val.second->succ_vreg; | |
560 if (succ_vreg == kInvalidVreg) { | |
561 succ_vreg = succ_val.second->use_vreg; | |
562 // Initialize succ_vreg in back propagation chain. | |
563 succ_val.second->succ_vreg = succ_vreg; | |
564 } | |
565 if (succ_vreg == kInvalidVreg) continue; | |
566 // May need to transition phi. | |
567 if (IsPhi(succ_vreg)) { | |
568 const PhiData* phi = GetPhi(succ_vreg); | |
569 if (phi->definition_rpo.ToSize() == succ_index) { | |
570 // phi definition block, transition to pred value. | |
571 succ_vreg = phi->operands[i]; | |
572 } | |
573 } | |
574 // Push succ_vreg up to all predecessors. | |
575 RpoNumber pred_rpo = block->predecessors()[i]; | |
576 OperandMap::Map& pred_map = outgoing_maps_[pred_rpo.ToSize()]->map(); | |
577 auto& pred_val = *pred_map.find(succ_val.first); | |
578 if (pred_val.second->use_vreg != kInvalidVreg) { | |
579 CHECK_EQ(succ_vreg, pred_val.second->use_vreg); | |
580 } | |
581 if (pred_val.second->define_vreg != kInvalidVreg) { | |
582 CHECK_EQ(succ_vreg, pred_val.second->define_vreg); | |
583 } | |
584 if (pred_val.second->succ_vreg != kInvalidVreg) { | |
585 if (succ_vreg != pred_val.second->succ_vreg) { | |
586 // When a block introduces 2 identical phis A and B, and both are | |
587 // operands to other phis C and D, and we optimized the moves | |
588 // defining A or B such that they now appear in the block defining | |
589 // A and B, the back propagation will get confused when visiting | |
590 // upwards from C and D. The operand in the block defining A and B | |
591 // will be attributed to C (or D, depending which of these is | |
592 // visited first). | |
593 CHECK(IsPhi(pred_val.second->succ_vreg)); | |
594 CHECK(IsPhi(succ_vreg)); | |
595 const PhiData* current_phi = GetPhi(succ_vreg); | |
596 const PhiData* assigned_phi = GetPhi(pred_val.second->succ_vreg); | |
597 CHECK_EQ(current_phi->operands.size(), | |
598 assigned_phi->operands.size()); | |
599 CHECK_EQ(current_phi->definition_rpo, | |
600 assigned_phi->definition_rpo); | |
601 for (size_t i = 0; i < current_phi->operands.size(); ++i) { | |
602 CHECK_EQ(current_phi->operands[i], assigned_phi->operands[i]); | |
603 } | |
604 } | |
605 } else { | |
606 pred_val.second->succ_vreg = succ_vreg; | |
607 block_ids.insert(pred_rpo.ToSize()); | |
608 } | |
609 } | |
610 } | |
611 } | |
612 // Clear uses and back links for second pass. | |
613 for (OperandMap* operand_map : incoming_maps_) { | |
614 for (auto& succ_val : operand_map->map()) { | |
615 succ_val.second->incoming = nullptr; | |
616 succ_val.second->use_vreg = kInvalidVreg; | |
617 } | |
618 } | |
619 } | |
620 | |
621 private: | |
622 OperandMap* InitializeFromFirstPredecessor(size_t block_index) { | |
623 OperandMap* to_init = outgoing_maps_[block_index]; | |
624 CHECK(to_init->map().empty()); | |
625 const InstructionBlock* block = | 468 const InstructionBlock* block = |
626 sequence()->instruction_blocks()[block_index]; | 469 sequence()->instruction_blocks()[block_index]; |
627 if (block->predecessors().empty()) return to_init; | 470 BlockAssessments* block_assessments = CreateForBlock(block); |
628 size_t predecessor_index = block->predecessors()[0].ToSize(); | 471 |
629 // Ensure not a backedge. | |
630 CHECK(predecessor_index < block->rpo_number().ToSize()); | |
631 OperandMap* incoming = outgoing_maps_[predecessor_index]; | |
632 // Copy map and replace values. | |
633 to_init->map() = incoming->map(); | |
634 for (auto& it : to_init->map()) { | |
635 OperandMap::MapValue* incoming = it.second; | |
636 it.second = new (zone()) OperandMap::MapValue(); | |
637 it.second->incoming = incoming; | |
638 } | |
639 // Copy to incoming map for second pass. | |
640 incoming_maps_[block_index]->map() = to_init->map(); | |
641 return to_init; | |
642 } | |
643 | |
644 OperandMap* InitializeFromIntersection(size_t block_index) { | |
645 return incoming_maps_[block_index]; | |
646 } | |
647 | |
648 void InitializeOperandMaps() { | |
649 size_t block_count = sequence()->instruction_blocks().size(); | |
650 incoming_maps_.reserve(block_count); | |
651 outgoing_maps_.reserve(block_count); | |
652 for (size_t i = 0; i < block_count; ++i) { | |
653 incoming_maps_.push_back(new (zone()) OperandMap(zone())); | |
654 outgoing_maps_.push_back(new (zone()) OperandMap(zone())); | |
655 } | |
656 } | |
657 | |
658 void InitializePhis() { | |
659 const size_t block_count = sequence()->instruction_blocks().size(); | |
660 for (size_t block_index = 0; block_index < block_count; ++block_index) { | |
661 const InstructionBlock* block = | |
662 sequence()->instruction_blocks()[block_index]; | |
663 for (const PhiInstruction* phi : block->phis()) { | |
664 int first_pred_vreg = phi->operands()[0]; | |
665 const PhiData* first_pred_phi = nullptr; | |
666 if (IsPhi(first_pred_vreg)) { | |
667 first_pred_phi = GetPhi(first_pred_vreg); | |
668 first_pred_vreg = first_pred_phi->first_pred_vreg; | |
669 } | |
670 CHECK(!IsPhi(first_pred_vreg)); | |
671 PhiData* phi_data = new (zone()) PhiData( | |
672 block->rpo_number(), phi, first_pred_vreg, first_pred_phi, zone()); | |
673 auto res = | |
674 phi_map_.insert(std::make_pair(phi->virtual_register(), phi_data)); | |
675 CHECK(res.second); | |
676 phi_map_guard_.Add(phi->virtual_register()); | |
677 } | |
678 } | |
679 } | |
680 | |
681 typedef ZoneVector<OperandMap*> OperandMaps; | |
682 typedef ZoneVector<PhiData*> PhiVector; | |
683 | |
684 Zone* zone() const { return zone_; } | |
685 const InstructionSequence* sequence() const { return sequence_; } | |
686 | |
687 Zone* const zone_; | |
688 const InstructionSequence* const sequence_; | |
689 BitVector phi_map_guard_; | |
690 PhiMap phi_map_; | |
691 OperandMaps incoming_maps_; | |
692 OperandMaps outgoing_maps_; | |
693 }; | |
694 | |
695 | |
696 void RegisterAllocatorVerifier::VerifyGapMoves() { | |
697 BlockMaps block_maps(zone(), sequence()); | |
698 VerifyGapMoves(&block_maps, true); | |
699 block_maps.PropagateUsesBackwards(); | |
700 VerifyGapMoves(&block_maps, false); | |
701 } | |
702 | |
703 | |
704 // Compute and verify outgoing values for every block. | |
705 void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps, | |
706 bool initial_pass) { | |
707 const size_t block_count = sequence()->instruction_blocks().size(); | |
708 for (size_t block_index = 0; block_index < block_count; ++block_index) { | |
709 OperandMap* current = | |
710 block_maps->InitializeIncoming(block_index, initial_pass); | |
711 const InstructionBlock* block = | |
712 sequence()->instruction_blocks()[block_index]; | |
713 for (int instr_index = block->code_start(); instr_index < block->code_end(); | 472 for (int instr_index = block->code_start(); instr_index < block->code_end(); |
714 ++instr_index) { | 473 ++instr_index) { |
715 const InstructionConstraint& instr_constraint = constraints_[instr_index]; | 474 const InstructionConstraint& instr_constraint = constraints_[instr_index]; |
716 const Instruction* instr = instr_constraint.instruction_; | 475 const Instruction* instr = instr_constraint.instruction_; |
717 current->RunGaps(zone(), instr); | 476 block_assessments->PerformMoves(instr); |
| 477 |
718 const OperandConstraint* op_constraints = | 478 const OperandConstraint* op_constraints = |
719 instr_constraint.operand_constraints_; | 479 instr_constraint.operand_constraints_; |
720 size_t count = 0; | 480 size_t count = 0; |
721 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { | 481 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { |
722 if (op_constraints[count].type_ == kImmediate || | 482 if (op_constraints[count].type_ == kImmediate || |
723 op_constraints[count].type_ == kExplicit) { | 483 op_constraints[count].type_ == kExplicit) { |
724 continue; | 484 continue; |
725 } | 485 } |
726 int virtual_register = op_constraints[count].virtual_register_; | 486 int virtual_register = op_constraints[count].virtual_register_; |
727 const InstructionOperand* op = instr->InputAt(i); | 487 InstructionOperand op = *instr->InputAt(i); |
728 if (!block_maps->IsPhi(virtual_register)) { | 488 ValidateUse(block->rpo_number(), block_assessments, op, |
729 current->Use(op, virtual_register, initial_pass); | 489 virtual_register); |
730 } else { | |
731 const PhiData* phi = block_maps->GetPhi(virtual_register); | |
732 current->UsePhi(op, phi, initial_pass); | |
733 } | |
734 } | 490 } |
735 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { | 491 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { |
736 current->Drop(instr->TempAt(i)); | 492 block_assessments->Drop(*instr->TempAt(i)); |
737 } | 493 } |
738 if (instr->IsCall()) { | 494 if (instr->IsCall()) { |
739 current->DropRegisters(config()); | 495 block_assessments->DropRegisters(); |
740 } | 496 } |
741 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { | 497 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { |
742 int virtual_register = op_constraints[count].virtual_register_; | 498 int virtual_register = op_constraints[count].virtual_register_; |
743 OperandMap::MapValue* value = | 499 block_assessments->AddDefinition(*instr->OutputAt(i), virtual_register); |
744 current->Define(zone(), instr->OutputAt(i), virtual_register); | |
745 if (op_constraints[count].type_ == kRegisterAndSlot) { | 500 if (op_constraints[count].type_ == kRegisterAndSlot) { |
746 const AllocatedOperand* reg_op = | 501 const AllocatedOperand* reg_op = |
747 AllocatedOperand::cast(instr->OutputAt(i)); | 502 AllocatedOperand::cast(instr->OutputAt(i)); |
748 MachineRepresentation rep = reg_op->representation(); | 503 MachineRepresentation rep = reg_op->representation(); |
749 const AllocatedOperand* stack_op = AllocatedOperand::New( | 504 const AllocatedOperand* stack_op = AllocatedOperand::New( |
750 zone(), LocationOperand::LocationKind::STACK_SLOT, rep, | 505 zone(), LocationOperand::LocationKind::STACK_SLOT, rep, |
751 op_constraints[i].spilled_slot_); | 506 op_constraints[count].spilled_slot_); |
752 auto insert_result = | 507 block_assessments->AddDefinition(*stack_op, virtual_register); |
753 current->map().insert(std::make_pair(stack_op, value)); | |
754 DCHECK(insert_result.second); | |
755 USE(insert_result); | |
756 } | 508 } |
757 } | 509 } |
758 } | 510 } |
| 511 // Now commit the assessments for this block. If there are any delayed |
| 512 // assessments, ValidatePendingAssessment should see this block, too. |
| 513 assessments_[block->rpo_number()] = block_assessments; |
| 514 |
| 515 auto todo_iter = outstanding_assessments_.find(block->rpo_number()); |
| 516 if (todo_iter == outstanding_assessments_.end()) continue; |
| 517 DelayedAssessments* todo = todo_iter->second; |
| 518 for (auto pair : todo->map()) { |
| 519 InstructionOperand op = pair.first; |
| 520 int vreg = pair.second; |
| 521 auto found_op = block_assessments->map().find(op); |
| 522 CHECK(found_op != block_assessments->map().end()); |
| 523 switch (found_op->second->kind()) { |
| 524 case Final: |
| 525 CHECK(FinalAssessment::cast(found_op->second)->virtual_register() == |
| 526 vreg); |
| 527 break; |
| 528 case Pending: |
| 529 ValidatePendingAssessment(block->rpo_number(), op, |
| 530 PendingAssessment::cast(found_op->second), |
| 531 vreg); |
| 532 block_assessments->map()[op] = new (zone()) FinalAssessment(vreg); |
| 533 break; |
| 534 } |
| 535 } |
759 } | 536 } |
760 } | 537 } |
761 | 538 |
762 } // namespace compiler | 539 } // namespace compiler |
763 } // namespace internal | 540 } // namespace internal |
764 } // namespace v8 | 541 } // namespace v8 |
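
Taken together, the verifier brackets the allocator: the constructor records constraints while gaps are still empty, and the two Verify* entry points run after allocation. A hypothetical driver follows; RunAllocator and CheckAllocation are placeholders, not V8 API, and the verifier calls are the ones defined in this file:

    #include "src/compiler/register-allocator-verifier.h"

    namespace v8 {
    namespace internal {
    namespace compiler {

    // Placeholder: stands in for the actual register allocation phase.
    void RunAllocator(InstructionSequence* sequence);

    void CheckAllocation(Zone* zone, const RegisterConfiguration* config,
                         InstructionSequence* sequence) {
      // The constructor records constraints while all gaps are unallocated.
      RegisterAllocatorVerifier verifier(zone, config, sequence);
      RunAllocator(sequence);       // placeholder for the real allocator
      verifier.VerifyAssignment();  // operands satisfy their constraints
      verifier.VerifyGapMoves();    // gap moves preserve each value's flow
    }

    }  // namespace compiler
    }  // namespace internal
    }  // namespace v8
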