Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/instruction.h" | 5 #include "src/compiler/instruction.h" |
| 6 #include "src/compiler/register-allocator-verifier.h" | 6 #include "src/compiler/register-allocator-verifier.h" |
| 7 | 7 |
| 8 namespace v8 { | 8 namespace v8 { |
| 9 namespace internal { | 9 namespace internal { |
| 10 namespace compiler { | 10 namespace compiler { |
| 11 | 11 |
| 12 static size_t OperandCount(const Instruction* instr) { | 12 static size_t OperandCount(const Instruction* instr) { |
| 13 return instr->InputCount() + instr->OutputCount() + instr->TempCount(); | 13 return instr->InputCount() + instr->OutputCount() + instr->TempCount(); |
| 14 } | 14 } |
| 15 | 15 |
| 16 | 16 |
| | 17 static void VerifyGapEmpty(const GapInstruction* gap) { |
| | 18 for (int i = GapInstruction::FIRST_INNER_POSITION; |
| | 19 i <= GapInstruction::LAST_INNER_POSITION; i++) { |
| | 20 GapInstruction::InnerPosition inner_pos = |
| | 21 static_cast<GapInstruction::InnerPosition>(i); |
| | 22 CHECK_EQ(NULL, gap->GetParallelMove(inner_pos)); |
| | 23 } |
| | 24 } |
| | 25 |
| | 26 |
| | 27 void RegisterAllocatorVerifier::VerifyInput( |
| | 28 const OperandConstraint& constraint) { |
| | 29 CHECK_NE(kSameAsFirst, constraint.type_); |
| | 30 if (constraint.type_ != kImmediate) { |
| | 31 CHECK_NE(UnallocatedOperand::kInvalidVirtualRegister, |
| | 32 constraint.virtual_register_); |
| | 33 } |
| | 34 } |
| | 35 |
| | 36 |
| | 37 void RegisterAllocatorVerifier::VerifyTemp( |
| | 38 const OperandConstraint& constraint) { |
| | 39 CHECK_NE(kSameAsFirst, constraint.type_); |
| | 40 CHECK_NE(kImmediate, constraint.type_); |
| | 41 CHECK_NE(kConstant, constraint.type_); |
| | 42 CHECK_EQ(UnallocatedOperand::kInvalidVirtualRegister, |
| | 43 constraint.virtual_register_); |
| | 44 } |
| | 45 |
| | 46 |
| | 47 void RegisterAllocatorVerifier::VerifyOutput( |
| | 48 const OperandConstraint& constraint) { |
| | 49 CHECK_NE(kImmediate, constraint.type_); |
| | 50 CHECK_NE(UnallocatedOperand::kInvalidVirtualRegister, |
| | 51 constraint.virtual_register_); |
| | 52 } |
| | 53 |
| | 54 |
| 17 RegisterAllocatorVerifier::RegisterAllocatorVerifier( | 55 RegisterAllocatorVerifier::RegisterAllocatorVerifier( |
| 18 Zone* zone, const InstructionSequence* sequence) | 56 Zone* zone, const RegisterConfiguration* config, |
| 19 : sequence_(sequence), constraints_(zone) { | 57 const InstructionSequence* sequence) |
| | 58 : zone_(zone), config_(config), sequence_(sequence), constraints_(zone) { |
| 20 constraints_.reserve(sequence->instructions().size()); | 59 constraints_.reserve(sequence->instructions().size()); |
| | 60 // TODO(dcarney): model unique constraints. |
| | 61 // Construct OperandConstraints for all InstructionOperands, eliminating |
| | 62 // kSameAsFirst along the way. |
| 21 for (const auto* instr : sequence->instructions()) { | 63 for (const auto* instr : sequence->instructions()) { |
| 22 const size_t operand_count = OperandCount(instr); | 64 const size_t operand_count = OperandCount(instr); |
| 23 auto* op_constraints = | 65 auto* op_constraints = |
| 24 zone->NewArray<OperandConstraint>(static_cast<int>(operand_count)); | 66 zone->NewArray<OperandConstraint>(static_cast<int>(operand_count)); |
| 25 // Construct OperandConstraints for all InstructionOperands, eliminating | |
| 26 // kSameAsFirst along the way. | |
| 27 size_t count = 0; | 67 size_t count = 0; |
| 28 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { | 68 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { |
| 29 BuildConstraint(instr->InputAt(i), &op_constraints[count]); | 69 BuildConstraint(instr->InputAt(i), &op_constraints[count]); |
| 30 CHECK_NE(kSameAsFirst, op_constraints[count].type_); | 70 VerifyInput(op_constraints[count]); |
| | 71 } |
| | 72 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { |
| | 73 BuildConstraint(instr->TempAt(i), &op_constraints[count]); |
| | 74 VerifyTemp(op_constraints[count]); |
| 31 } | 75 } |
| 32 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { | 76 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { |
| 33 BuildConstraint(instr->OutputAt(i), &op_constraints[count]); | 77 BuildConstraint(instr->OutputAt(i), &op_constraints[count]); |
| 34 if (op_constraints[count].type_ == kSameAsFirst) { | 78 if (op_constraints[count].type_ == kSameAsFirst) { |
| 35 CHECK(instr->InputCount() > 0); | 79 CHECK(instr->InputCount() > 0); |
| 36 op_constraints[count] = op_constraints[0]; | 80 op_constraints[count].type_ = op_constraints[0].type_; |
| | 81 op_constraints[count].value_ = op_constraints[0].value_; |
| 37 } | 82 } |
| 38 } | 83 VerifyOutput(op_constraints[count]); |
| 39 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { | |
| 40 BuildConstraint(instr->TempAt(i), &op_constraints[count]); | |
| 41 CHECK_NE(kSameAsFirst, op_constraints[count].type_); | |
| 42 } | 84 } |
| 43 // All gaps should be totally unallocated at this point. | 85 // All gaps should be totally unallocated at this point. |
| 44 if (instr->IsGapMoves()) { | 86 if (instr->IsGapMoves()) { |
| 45 const auto* gap = GapInstruction::cast(instr); | 87 CHECK(operand_count == 0); |
| 46 for (int i = GapInstruction::FIRST_INNER_POSITION; | 88 VerifyGapEmpty(GapInstruction::cast(instr)); |
| 47 i <= GapInstruction::LAST_INNER_POSITION; i++) { | |
| 48 GapInstruction::InnerPosition inner_pos = | |
| 49 static_cast<GapInstruction::InnerPosition>(i); | |
| 50 CHECK_EQ(NULL, gap->GetParallelMove(inner_pos)); | |
| 51 } | |
| 52 } | 89 } |
| 53 InstructionConstraint instr_constraint = {instr, operand_count, | 90 InstructionConstraint instr_constraint = {instr, operand_count, |
| 54 op_constraints}; | 91 op_constraints}; |
| 55 constraints()->push_back(instr_constraint); | 92 constraints()->push_back(instr_constraint); |
| 56 } | 93 } |
| 57 } | 94 } |
| 58 | 95 |
| 59 | 96 |
| 60 void RegisterAllocatorVerifier::VerifyAssignment() { | 97 void RegisterAllocatorVerifier::VerifyAssignment() { |
| 61 CHECK(sequence()->instructions().size() == constraints()->size()); | 98 CHECK(sequence()->instructions().size() == constraints()->size()); |
| 62 auto instr_it = sequence()->begin(); | 99 auto instr_it = sequence()->begin(); |
| 63 for (const auto& instr_constraint : *constraints()) { | 100 for (const auto& instr_constraint : *constraints()) { |
| 64 const auto* instr = instr_constraint.instruction_; | 101 const auto* instr = instr_constraint.instruction_; |
| 65 const size_t operand_count = instr_constraint.operand_constaints_size_; | 102 const size_t operand_count = instr_constraint.operand_constaints_size_; |
| 66 const auto* op_constraints = instr_constraint.operand_constraints_; | 103 const auto* op_constraints = instr_constraint.operand_constraints_; |
| 67 CHECK_EQ(instr, *instr_it); | 104 CHECK_EQ(instr, *instr_it); |
| 68 CHECK(operand_count == OperandCount(instr)); | 105 CHECK(operand_count == OperandCount(instr)); |
| 69 size_t count = 0; | 106 size_t count = 0; |
| 70 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { | 107 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { |
| 71 CheckConstraint(instr->InputAt(i), &op_constraints[count]); | 108 CheckConstraint(instr->InputAt(i), &op_constraints[count]); |
| 72 } | 109 } |
| | 110 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { |
| | 111 CheckConstraint(instr->TempAt(i), &op_constraints[count]); |
| | 112 } |
| 73 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { | 113 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { |
| 74 CheckConstraint(instr->OutputAt(i), &op_constraints[count]); | 114 CheckConstraint(instr->OutputAt(i), &op_constraints[count]); |
| 75 } | 115 } |
| 76 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { | |
| 77 CheckConstraint(instr->TempAt(i), &op_constraints[count]); | |
| 78 } | |
| 79 ++instr_it; | 116 ++instr_it; |
| 80 } | 117 } |
| 81 } | 118 } |
| 82 | 119 |
| 83 | 120 |
| 84 void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op, | 121 void RegisterAllocatorVerifier::BuildConstraint(const InstructionOperand* op, |
| 85 OperandConstraint* constraint) { | 122 OperandConstraint* constraint) { |
| 86 constraint->value_ = kMinInt; | 123 constraint->value_ = kMinInt; |
| | 124 constraint->virtual_register_ = UnallocatedOperand::kInvalidVirtualRegister; |
| 87 if (op->IsConstant()) { | 125 if (op->IsConstant()) { |
| 88 constraint->type_ = kConstant; | 126 constraint->type_ = kConstant; |
| 89 constraint->value_ = ConstantOperand::cast(op)->index(); | 127 constraint->value_ = ConstantOperand::cast(op)->index(); |
| | 128 constraint->virtual_register_ = constraint->value_; |
| 90 } else if (op->IsImmediate()) { | 129 } else if (op->IsImmediate()) { |
| 91 constraint->type_ = kImmediate; | 130 constraint->type_ = kImmediate; |
| 92 constraint->value_ = ImmediateOperand::cast(op)->index(); | 131 constraint->value_ = ImmediateOperand::cast(op)->index(); |
| 93 } else { | 132 } else { |
| 94 CHECK(op->IsUnallocated()); | 133 CHECK(op->IsUnallocated()); |
| 95 const auto* unallocated = UnallocatedOperand::cast(op); | 134 const auto* unallocated = UnallocatedOperand::cast(op); |
| 96 int vreg = unallocated->virtual_register(); | 135 int vreg = unallocated->virtual_register(); |
| | 136 constraint->virtual_register_ = vreg; |
| 97 if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) { | 137 if (unallocated->basic_policy() == UnallocatedOperand::FIXED_SLOT) { |
| 98 constraint->type_ = kFixedSlot; | 138 constraint->type_ = kFixedSlot; |
| 99 constraint->value_ = unallocated->fixed_slot_index(); | 139 constraint->value_ = unallocated->fixed_slot_index(); |
| 100 } else { | 140 } else { |
| 101 switch (unallocated->extended_policy()) { | 141 switch (unallocated->extended_policy()) { |
| 102 case UnallocatedOperand::ANY: | 142 case UnallocatedOperand::ANY: |
| 103 CHECK(false); | 143 CHECK(false); |
| 104 break; | 144 break; |
| 105 case UnallocatedOperand::NONE: | 145 case UnallocatedOperand::NONE: |
| 106 if (sequence()->IsDouble(vreg)) { | 146 if (sequence()->IsDouble(vreg)) { |
| (...skipping 60 matching lines...) | |
| 167 return; | 207 return; |
| 168 case kNoneDouble: | 208 case kNoneDouble: |
| 169 CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot()); | 209 CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot()); |
| 170 return; | 210 return; |
| 171 case kSameAsFirst: | 211 case kSameAsFirst: |
| 172 CHECK(false); | 212 CHECK(false); |
| 173 return; | 213 return; |
| 174 } | 214 } |
| 175 } | 215 } |
| 176 | 216 |
| | 217 |
| | 218 class RegisterAllocatorVerifier::OutgoingMapping : public ZoneObject { |
| | 219 public: |
| | 220 struct OperandLess { |
| | 221 bool operator()(const InstructionOperand* a, |
| | 222 const InstructionOperand* b) const { |
| | 223 if (a->kind() == b->kind()) return a->index() < b->index(); |
| | 224 return a->kind() < b->kind(); |
| | 225 } |
| | 226 }; |
| | 227 |
| | 228 typedef std::map< |
| | 229 const InstructionOperand*, int, OperandLess, |
| | 230 zone_allocator<std::pair<const InstructionOperand*, const int>>> |
| | 231 LocationMap; |
| | 232 |
| | 233 explicit OutgoingMapping(Zone* zone) |
| | 234 : locations_(LocationMap::key_compare(), |
| | 235 LocationMap::allocator_type(zone)), |
| | 236 predecessor_intersection_(LocationMap::key_compare(), |
| | 237 LocationMap::allocator_type(zone)) {} |
| | 238 |
| | 239 LocationMap* locations() { return &locations_; } |
| | 240 |
| | 241 void RunPhis(const InstructionSequence* sequence, |
| | 242 const InstructionBlock* block, size_t phi_index) { |
| | 243 // This operation is only valid in edge split form. |
| | 244 size_t predecessor_index = block->predecessors()[phi_index].ToSize(); |
| | 245 CHECK(sequence->instruction_blocks()[predecessor_index]->SuccessorCount() == |
| | 246 1); |
| | 247 const auto* gap = sequence->GetBlockStart(block->rpo_number()); |
| | 248 // The first moves in the BlockStartInstruction are the phi moves inserted |
| | 249 // by ResolvePhis. |
| | 250 const ParallelMove* move = gap->GetParallelMove(GapInstruction::START); |
| | 251 CHECK_NE(nullptr, move); |
| | 252 const auto* move_ops = move->move_operands(); |
| | 253 CHECK(block->phis().size() <= static_cast<size_t>(move_ops->length())); |
| | 254 auto move_it = move_ops->begin(); |
| | 255 for (const auto* phi : block->phis()) { |
| | 256 const auto* op = move_it->source(); |
| | 257 auto it = locations()->find(op); |
| | 258 CHECK(it != locations()->end()); |
| | 259 CHECK_EQ(it->second, phi->operands()[phi_index]); |
| | 260 it->second = phi->virtual_register(); |
| | 261 ++move_it; |
| | 262 } |
| | 263 } |
| | 264 |
| | 265 void RunGapInstruction(Zone* zone, const GapInstruction* gap) { |
| | 266 for (int i = GapInstruction::FIRST_INNER_POSITION; |
| | 267 i <= GapInstruction::LAST_INNER_POSITION; i++) { |
| | 268 GapInstruction::InnerPosition inner_pos = |
| | 269 static_cast<GapInstruction::InnerPosition>(i); |
| | 270 const ParallelMove* move = gap->GetParallelMove(inner_pos); |
| | 271 if (move == nullptr) continue; |
| | 272 RunParallelMoves(zone, move); |
| | 273 } |
| | 274 } |
| | 275 |
| | 276 void RunParallelMoves(Zone* zone, const ParallelMove* move) { |
| | 277 // Compute outgoing mappings. |
| | 278 LocationMap to_insert((LocationMap::key_compare()), |
| | 279 LocationMap::allocator_type(zone)); |
| | 280 auto* moves = move->move_operands(); |
| | 281 for (auto i = moves->begin(); i != moves->end(); ++i) { |
| | 282 if (i->IsEliminated()) continue; |
| | 283 CHECK(i->source()->kind() != InstructionOperand::INVALID); |
| | 284 auto cur = locations()->find(i->source()); |
| | 285 CHECK(cur != locations()->end()); |
| | 286 if (i->destination()->kind() == InstructionOperand::INVALID) continue; |
| | 287 to_insert.insert(std::make_pair(i->destination(), cur->second)); |
| | 288 } |
| | 289 // Drop current mappings. |
| | 290 for (auto i = moves->begin(); i != moves->end(); ++i) { |
| | 291 if (i->IsEliminated()) continue; |
| | 292 auto cur = locations()->find(i->destination()); |
| | 293 if (cur != locations()->end()) locations()->erase(cur); |
| | 294 } |
| | 295 // Insert new values. |
| | 296 locations()->insert(to_insert.begin(), to_insert.end()); |
| | 297 } |
| | 298 |
| | 299 void Map(const InstructionOperand* op, int virtual_register) { |
| | 300 locations()->insert(std::make_pair(op, virtual_register)); |
| | 301 } |
| | 302 |
| | 303 void Drop(const InstructionOperand* op) { |
| | 304 auto it = locations()->find(op); |
| | 305 if (it != locations()->end()) locations()->erase(it); |
| | 306 } |
| | 307 |
| | 308 void DropRegisters(const RegisterConfiguration* config) { |
| | 309 for (int i = 0; i < config->num_general_registers(); ++i) { |
| | 310 InstructionOperand op(InstructionOperand::REGISTER, i); |
| | 311 Drop(&op); |
| | 312 } |
| | 313 for (int i = 0; i < config->num_double_registers(); ++i) { |
| | 314 InstructionOperand op(InstructionOperand::DOUBLE_REGISTER, i); |
| | 315 Drop(&op); |
| | 316 } |
| | 317 } |
| | 318 |
| | 319 void InitializeFromFirstPredecessor(const InstructionSequence* sequence, |
| | 320 const OutgoingMappings* outgoing_mappings, |
| | 321 const InstructionBlock* block) { |
| | 322 if (block->predecessors().empty()) return; |
| | 323 size_t predecessor_index = block->predecessors()[0].ToSize(); |
| | 324 CHECK(predecessor_index < block->rpo_number().ToSize()); |
| | 325 auto* incoming = outgoing_mappings->at(predecessor_index); |
| | 326 if (block->PredecessorCount() > 1) { |
| | 327 // Update incoming map with phis. The remaining phis will be checked later |
| | 328 // as their mappings are not guaranteed to exist yet. |
| | 329 incoming->RunPhis(sequence, block, 0); |
| | 330 } |
| | 331 // Now initialize outgoing mapping for this block with incoming mapping. |
| | 332 CHECK(locations_.empty()); |
| | 333 locations_ = incoming->locations_; |
| | 334 } |
| | 335 |
| | 336 void InitializeFromIntersection() { locations_ = predecessor_intersection_; } |
| | 337 |
| | 338 void InitializeIntersection(const OutgoingMapping* incoming) { |
| | 339 CHECK(predecessor_intersection_.empty()); |
| | 340 predecessor_intersection_ = incoming->locations_; |
| | 341 } |
| | 342 |
| | 343 void Intersect(const OutgoingMapping* other) { |
| | 344 if (predecessor_intersection_.empty()) return; |
| | 345 auto it = predecessor_intersection_.begin(); |
| | 346 OperandLess less; |
| | 347 for (const auto& o : other->locations_) { |
| | 348 while (less(it->first, o.first)) { |
| | 349 ++it; |
| | 350 if (it == predecessor_intersection_.end()) return; |
| | 351 } |
| | 352 if (it->first->Equals(o.first)) { |
| | 353 if (o.second != it->second) { |
| | 354 predecessor_intersection_.erase(it++); |
| | 355 } else { |
| | 356 ++it; |
| | 357 } |
| | 358 if (it == predecessor_intersection_.end()) return; |
| | 359 } |
| | 360 } |
| | 361 } |
| | 362 |
| | 363 private: |
| | 364 LocationMap locations_; |
| | 365 LocationMap predecessor_intersection_; |
| | 366 |
| | 367 DISALLOW_COPY_AND_ASSIGN(OutgoingMapping); |
| | 368 }; |
| | 369 |
| | 370 |
| | 371 // Verify that all gap moves move the operands for a virtual register into the |
| | 372 // correct location for every instruction. |
| | 373 void RegisterAllocatorVerifier::VerifyGapMoves() { |
| | 374 typedef ZoneVector<OutgoingMapping*> OutgoingMappings; |
| | 375 OutgoingMappings outgoing_mappings( |
| | 376 static_cast<int>(sequence()->instruction_blocks().size()), nullptr, |
| | 377 zone()); |
| | 378 // Construct all mappings, ignoring back edges and multiple entries. |
| | 379 ConstructOutgoingMappings(&outgoing_mappings, true); |
| | 380 // Run all remaining phis and compute the intersection of all predecessor |
| | 381 // mappings. |
| | 382 for (const auto* block : sequence()->instruction_blocks()) { |
| | 383 if (block->PredecessorCount() == 0) continue; |
| | 384 const size_t block_index = block->rpo_number().ToSize(); |
| | 385 auto* mapping = outgoing_mappings[block_index]; |
| | 386 bool initialized = false; |
| | 387 // Walk predecessors in reverse to ensure Intersect is correctly working. |
| | 388 // If it did nothing, the second pass would do exactly what the first pass |
| | 389 // did. |
| | 390 for (size_t phi = block->PredecessorCount() - 1; true; --phi) { |
|
> Jarin, 2014/11/12 14:48:47: Please, rename to phi_input (or something that mak…

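For readers following the loop under review: it walks the block's predecessors in reverse, runs the pending phi updates, and intersects the predecessors' outgoing (location, virtual register) mappings, keeping only pairs on which all predecessors agree. A minimal standalone sketch of those intersection semantics, using a plain `std::map<std::string, int>` in place of the zone-allocated `LocationMap` and a `find` lookup instead of the original's sorted merge walk; the register-name strings and the driver in `main` are illustrative only, not V8 code:

```cpp
#include <iostream>
#include <map>
#include <string>

// Simplified stand-in for OutgoingMapping's LocationMap: location -> vreg.
using LocationMap = std::map<std::string, int>;

// Mirrors OutgoingMapping::Intersect: a mapping survives unless the other
// predecessor maps the same location to a *different* virtual register.
// Locations absent from the other map are left untouched, matching the
// original, which only erases on an explicit disagreement.
void Intersect(LocationMap* intersection, const LocationMap& other) {
  for (auto it = intersection->begin(); it != intersection->end();) {
    auto found = other.find(it->first);
    if (found != other.end() && found->second != it->second) {
      it = intersection->erase(it);  // conflicting vreg: drop the location
    } else {
      ++it;  // agreement, or no entry in the other map: keep the mapping
    }
  }
}

int main() {
  LocationMap from_pred0 = {{"rax", 1}, {"rbx", 2}, {"[sp+8]", 3}};
  LocationMap from_pred1 = {{"rax", 1}, {"rbx", 7}};  // disagrees on rbx
  Intersect(&from_pred0, from_pred1);
  for (const auto& entry : from_pred0) {
    std::cout << entry.first << " -> v" << entry.second << "\n";
  }
  // Prints "[sp+8] -> v3" and "rax -> v1"; rbx is dropped because the two
  // predecessors assign it different virtual registers.
  return 0;
}
```
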
| | 391 const size_t pred_block_index = block->predecessors()[phi].ToSize(); |
| | 392 auto* incoming = outgoing_mappings[pred_block_index]; |
| | 393 if (phi != 0) incoming->RunPhis(sequence(), block, phi); |
| | 394 if (!initialized) { |
| | 395 mapping->InitializeIntersection(incoming); |
| | 396 initialized = true; |
| | 397 } else { |
| | 398 mapping->Intersect(incoming); |
| | 399 } |
| | 400 if (phi == 0) break; |
| | 401 } |
| | 402 } |
| | 403 // Construct all mappings again, this time using the intersection mapping |
| | 404 // above as the incoming mapping instead of the result from the first |
| | 405 // predecessor. |
| | 406 ConstructOutgoingMappings(&outgoing_mappings, false); |
| | 407 } |
| | 408 |
| | 409 |
| | 410 void RegisterAllocatorVerifier::ConstructOutgoingMappings( |
| | 411 OutgoingMappings* outgoing_mappings, bool initial_pass) { |
| | 412 // Compute the locations of all virtual registers leaving every block, using |
| | 413 // only the first predecessor as the input locations. |
| | 414 for (const auto* block : sequence()->instruction_blocks()) { |
| | 415 const size_t block_index = block->rpo_number().ToSize(); |
| | 416 auto* current = outgoing_mappings->at(block_index); |
| | 417 CHECK(initial_pass == (current == nullptr)); |
| | 418 // Initialize current. |
| | 419 if (!initial_pass) { |
| | 420 // Skip check second time around for blocks without multiple predecessors |
| | 421 // as we have already executed this in the initial run. |
| | 422 if (block->PredecessorCount() <= 1) continue; |
| | 423 current->InitializeFromIntersection(); |
| | 424 } else { |
| | 425 current = new (zone()) OutgoingMapping(zone()); |
| | 426 outgoing_mappings->at(block_index) = current; |
| | 427 // Copy outgoing values from predecessor block. |
| | 428 current->InitializeFromFirstPredecessor(sequence(), outgoing_mappings, |
| | 429 block); |
| | 430 } |
| | 431 // Update current with gaps and operands for all instructions in block. |
| | 432 for (int instr_index = block->code_start(); instr_index < block->code_end(); |
| | 433 ++instr_index) { |
| | 434 const auto& instr_constraint = constraints_[instr_index]; |
| | 435 const auto* instr = instr_constraint.instruction_; |
| | 436 const auto* op_constraints = instr_constraint.operand_constraints_; |
| | 437 size_t count = 0; |
| | 438 for (size_t i = 0; i < instr->InputCount(); ++i, ++count) { |
| | 439 if (op_constraints[count].type_ == kImmediate) continue; |
| | 440 auto it = current->locations()->find(instr->InputAt(i)); |
| | 441 int virtual_register = op_constraints[count].virtual_register_; |
| | 442 CHECK(it != current->locations()->end()); |
| | 443 CHECK_EQ(it->second, virtual_register); |
| | 444 } |
| | 445 for (size_t i = 0; i < instr->TempCount(); ++i, ++count) { |
| | 446 current->Drop(instr->TempAt(i)); |
| | 447 } |
| | 448 if (instr->IsCall()) { |
| | 449 current->DropRegisters(config()); |
| | 450 } |
| | 451 for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) { |
| | 452 current->Drop(instr->OutputAt(i)); |
| | 453 int virtual_register = op_constraints[count].virtual_register_; |
| | 454 current->Map(instr->OutputAt(i), virtual_register); |
| | 455 } |
| | 456 if (instr->IsGapMoves()) { |
| | 457 const auto* gap = GapInstruction::cast(instr); |
| | 458 current->RunGapInstruction(zone(), gap); |
| | 459 } |
| | 460 } |
| | 461 } |
| | 462 } |
| | 463 |
| 177 } // namespace compiler | 464 } // namespace compiler |
| 178 } // namespace internal | 465 } // namespace internal |
| 179 } // namespace v8 | 466 } // namespace v8 |
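
Taken together, the patch establishes a three-step protocol: capture per-operand constraints at construction time, then check operand assignments and gap moves after allocation. A minimal sketch of how a caller might wire this up; only the verifier's constructor and the two `Verify*` calls reflect the API in this patch, while `AllocateAndVerify` and `RunRegisterAllocator` are hypothetical placeholders, and the snippet compiles only inside the V8 tree:

```cpp
#include "src/compiler/instruction.h"
#include "src/compiler/register-allocator-verifier.h"

namespace v8 {
namespace internal {
namespace compiler {

// Placeholder for whatever allocation pass the pipeline actually runs.
void RunRegisterAllocator(InstructionSequence* sequence);

void AllocateAndVerify(Zone* zone, const RegisterConfiguration* config,
                       InstructionSequence* sequence) {
  // Construct BEFORE allocation: the constructor records an OperandConstraint
  // for every operand while operands are still unallocated, and checks that
  // all gaps start out empty.
  RegisterAllocatorVerifier verifier(zone, config, sequence);

  RunRegisterAllocator(sequence);

  // After allocation, every operand must satisfy its recorded constraint...
  verifier.VerifyAssignment();
  // ...and the gap moves must deliver each virtual register to the location
  // its uses expect, across block boundaries and phis.
  verifier.VerifyGapMoves();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
```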