Chromium Code Reviews

Unified Diff: src/compiler/register-allocator-verifier.cc

Issue 875853002: [turbofan] make register allocator verifier independent of phi assignment (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 11 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.
 
+#include "src/bit-vector.h"
 #include "src/compiler/instruction.h"
 #include "src/compiler/register-allocator-verifier.h"
 
 namespace v8 {
 namespace internal {
 namespace compiler {
 
 static size_t OperandCount(const Instruction* instr) {
   return instr->InputCount() + instr->OutputCount() + instr->TempCount();
 }
(...skipping 192 matching lines...)
       return;
     case kNoneDouble:
       CHECK(op->IsDoubleRegister() || op->IsDoubleStackSlot());
       return;
     case kSameAsFirst:
       CHECK(false);
       return;
   }
 }
 
+namespace {
 
-class RegisterAllocatorVerifier::OutgoingMapping : public ZoneObject {
+typedef BasicBlock::RpoNumber Rpo;
+
+static const int kInvalidVreg = UnallocatedOperand::kInvalidVirtualRegister;
+
+struct PhiData : public ZoneObject {
+  PhiData(Rpo definition_rpo, const PhiInstruction* phi, int first_pred_vreg,
+          const PhiData* first_pred_phi, Zone* zone)
+      : definition_rpo(definition_rpo),
+        virtual_register(phi->virtual_register()),
+        first_pred_vreg(first_pred_vreg),
+        first_pred_phi(first_pred_phi),
+        operands(zone) {
+    operands.reserve(phi->operands().size());
+    operands.insert(operands.begin(), phi->operands().begin(),
+                    phi->operands().end());
+  }
+  const Rpo definition_rpo;
+  const int virtual_register;
+  const int first_pred_vreg;
+  const PhiData* first_pred_phi;
+  IntVector operands;
+};
+
+typedef std::map<int, PhiData*, std::less<int>,
+                 zone_allocator<std::pair<int, PhiData*>>> PhiMapBase;
+
+class PhiMap : public PhiMapBase, public ZoneObject {
  public:
-  struct OperandLess {
-    bool operator()(const InstructionOperand* a,
-                    const InstructionOperand* b) const {
-      if (a->kind() == b->kind()) return a->index() < b->index();
-      return a->kind() < b->kind();
+  explicit PhiMap(Zone* zone)
+      : PhiMapBase(key_compare(), allocator_type(zone)) {}
+};
+
+struct OperandLess {
+  bool operator()(const InstructionOperand* a,
+                  const InstructionOperand* b) const {
+    if (a->kind() == b->kind()) return a->index() < b->index();
+    return a->kind() < b->kind();
+  }
+};
+
+class OperandMap : public ZoneObject {
+ public:
+  struct MapValue : public ZoneObject {
+    MapValue()
+        : incoming(nullptr),
+          define_vreg(kInvalidVreg),
+          use_vreg(kInvalidVreg),
+          succ_vreg(kInvalidVreg) {}
+    MapValue* incoming;  // value from first predecessor block.
+    int define_vreg;     // valid if this value was defined in this block.
+    int use_vreg;        // valid if this value was used in this block.
+    int succ_vreg;       // valid if propagated back from successor block.
+  };
+
+  typedef std::map<
+      const InstructionOperand*, MapValue*, OperandLess,
+      zone_allocator<std::pair<const InstructionOperand*, MapValue*>>> MapBase;
+
+  class Map : public MapBase {
+   public:
+    explicit Map(Zone* zone) : MapBase(key_compare(), allocator_type(zone)) {}
+
+    // Remove all entries with keys not in other.
+    void Intersect(const Map& other) {
+      if (this->empty()) return;
+      auto it = this->begin();
+      OperandLess less;
+      for (const auto& o : other) {
+        while (less(it->first, o.first)) {
+          this->erase(it++);
+          if (it == this->end()) return;
+        }
+        if (it->first->Equals(o.first)) {
+          ++it;
+          if (it == this->end()) return;
+        } else {
+          CHECK(less(o.first, it->first));
+        }
+      }
     }
   };
 
-  typedef std::map<
-      const InstructionOperand*, int, OperandLess,
-      zone_allocator<std::pair<const InstructionOperand*, const int>>>
-      LocationMap;
-
-  explicit OutgoingMapping(Zone* zone)
-      : locations_(LocationMap::key_compare(),
-                   LocationMap::allocator_type(zone)),
-        predecessor_intersection_(LocationMap::key_compare(),
-                                  LocationMap::allocator_type(zone)) {}
-
-  LocationMap* locations() { return &locations_; }
-
-  void RunPhis(const InstructionSequence* sequence,
-               const InstructionBlock* block, size_t phi_index) {
-    // This operation is only valid in edge split form.
-    size_t predecessor_index = block->predecessors()[phi_index].ToSize();
-    for (const auto* phi : block->phis()) {
-      CHECK(
-          sequence->instruction_blocks()[predecessor_index]->SuccessorCount() ==
-          1);
-      auto input = phi->inputs()[phi_index];
-      CHECK(locations()->find(input) != locations()->end());
-      auto it = locations()->find(phi->output());
-      CHECK(it != locations()->end());
-      if (input->IsConstant()) {
-        CHECK_EQ(it->second, input->index());
-      } else {
-        CHECK_EQ(it->second, phi->operands()[phi_index]);
-      }
-      it->second = phi->virtual_register();
-    }
-  }
-
+  explicit OperandMap(Zone* zone) : map_(zone) {}
+
+  Map& map() { return map_; }
+
+  void RunParallelMoves(Zone* zone, const ParallelMove* move) {
+    // Compute outgoing mappings.
+    Map to_insert(zone);
+    auto moves = move->move_operands();
+    for (auto i = moves->begin(); i != moves->end(); ++i) {
+      if (i->IsEliminated()) continue;
+      auto cur = map().find(i->source());
+      CHECK(cur != map().end());
+      to_insert.insert(std::make_pair(i->destination(), cur->second));
+    }
+    // Drop current mappings.
+    for (auto i = moves->begin(); i != moves->end(); ++i) {
+      if (i->IsEliminated()) continue;
+      auto cur = map().find(i->destination());
+      if (cur != map().end()) map().erase(cur);
+    }
+    // Insert new values.
+    map().insert(to_insert.begin(), to_insert.end());
+  }
+
   void RunGapInstruction(Zone* zone, const GapInstruction* gap) {
     for (int i = GapInstruction::FIRST_INNER_POSITION;
          i <= GapInstruction::LAST_INNER_POSITION; i++) {
-      GapInstruction::InnerPosition inner_pos =
-          static_cast<GapInstruction::InnerPosition>(i);
-      const ParallelMove* move = gap->GetParallelMove(inner_pos);
+      auto inner_pos = static_cast<GapInstruction::InnerPosition>(i);
+      auto move = gap->GetParallelMove(inner_pos);
       if (move == nullptr) continue;
       RunParallelMoves(zone, move);
     }
   }
 
-  void RunParallelMoves(Zone* zone, const ParallelMove* move) {
-    // Compute outgoing mappings.
-    LocationMap to_insert((LocationMap::key_compare()),
-                          LocationMap::allocator_type(zone));
-    auto* moves = move->move_operands();
-    for (auto i = moves->begin(); i != moves->end(); ++i) {
-      if (i->IsEliminated()) continue;
-      auto cur = locations()->find(i->source());
-      CHECK(cur != locations()->end());
-      to_insert.insert(std::make_pair(i->destination(), cur->second));
-    }
-    // Drop current mappings.
-    for (auto i = moves->begin(); i != moves->end(); ++i) {
-      if (i->IsEliminated()) continue;
-      auto cur = locations()->find(i->destination());
-      if (cur != locations()->end()) locations()->erase(cur);
-    }
-    // Insert new values.
-    locations()->insert(to_insert.begin(), to_insert.end());
-  }
-
-  void Map(const InstructionOperand* op, int virtual_register) {
-    locations()->insert(std::make_pair(op, virtual_register));
-  }
-
   void Drop(const InstructionOperand* op) {
-    auto it = locations()->find(op);
-    if (it != locations()->end()) locations()->erase(it);
+    auto it = map().find(op);
+    if (it != map().end()) map().erase(it);
   }
 
   void DropRegisters(const RegisterConfiguration* config) {
     for (int i = 0; i < config->num_general_registers(); ++i) {
       InstructionOperand op(InstructionOperand::REGISTER, i);
       Drop(&op);
     }
     for (int i = 0; i < config->num_double_registers(); ++i) {
       InstructionOperand op(InstructionOperand::DOUBLE_REGISTER, i);
       Drop(&op);
     }
   }
 
-  void InitializeFromFirstPredecessor(const InstructionSequence* sequence,
-                                      const OutgoingMappings* outgoing_mappings,
-                                      const InstructionBlock* block) {
-    if (block->predecessors().empty()) return;
+  void Define(Zone* zone, const InstructionOperand* op, int virtual_register) {
+    auto value = new (zone) MapValue();
+    value->define_vreg = virtual_register;
+    auto res = map().insert(std::make_pair(op, value));
+    if (!res.second) res.first->second = value;
+  }
+
+  void Use(const InstructionOperand* op, int use_vreg, bool initial_pass) {
+    auto it = map().find(op);
+    CHECK(it != map().end());
+    auto v = it->second;
+    if (v->define_vreg != kInvalidVreg) {
+      CHECK_EQ(v->define_vreg, use_vreg);
+    }
+    // Already used this vreg in this block.
+    if (v->use_vreg != kInvalidVreg) {
+      CHECK_EQ(v->use_vreg, use_vreg);
+      return;
+    }
+    if (!initial_pass) {
+      // A value may be defined and used in this block or the use must have
+      // propagated up.
+      if (v->succ_vreg != kInvalidVreg) {
+        CHECK_EQ(v->succ_vreg, use_vreg);
+      } else {
+        CHECK_EQ(v->define_vreg, use_vreg);
+      }
+      // Mark the use.
+      it->second->use_vreg = use_vreg;
+      return;
+    }
+    // Go up block list and ensure the correct definition is reached.
+    for (; v != nullptr; v = v->incoming) {
+      // Value unused in block.
+      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
+        continue;
+      }
+      // Found correct definition or use.
+      CHECK(v->define_vreg == use_vreg || v->use_vreg == use_vreg);
+      // Mark the use.
+      it->second->use_vreg = use_vreg;
+      return;
+    }
+    // Use of a non-phi value without definition.
+    CHECK(false);
+  }
+
+  void UsePhi(const InstructionOperand* op, const PhiData* phi,
+              bool initial_pass) {
+    auto it = map().find(op);
+    CHECK(it != map().end());
+    auto v = it->second;
+    int use_vreg = phi->virtual_register;
+    // Phis are not defined.
+    CHECK_EQ(kInvalidVreg, v->define_vreg);
+    // Already used this vreg in this block.
+    if (v->use_vreg != kInvalidVreg) {
+      CHECK_EQ(v->use_vreg, use_vreg);
+      return;
+    }
+    if (!initial_pass) {
+      // A used phi must have propagated its use to a predecessor.
+      CHECK_EQ(v->succ_vreg, use_vreg);
+      // Mark the use.
+      v->use_vreg = use_vreg;
+      return;
+    }
+    // Go up the block list starting at the first predecessor and ensure this
+    // phi has a correct use or definition.
+    for (v = v->incoming; v != nullptr; v = v->incoming) {
+      // Value unused in block.
+      if (v->define_vreg == kInvalidVreg && v->use_vreg == kInvalidVreg) {
+        continue;
+      }
+      // Found correct definition or use.
+      if (v->define_vreg != kInvalidVreg) {
+        CHECK(v->define_vreg == phi->first_pred_vreg);
+      } else if (v->use_vreg != phi->first_pred_vreg) {
+        // Walk the phi chain, hunting for a matching phi use.
+        auto p = phi;
+        for (; p != nullptr; p = p->first_pred_phi) {
+          if (p->virtual_register == v->use_vreg) break;
+        }
+        CHECK_NE(nullptr, p);
+      }
+      // Mark the use.
+      it->second->use_vreg = use_vreg;
+      return;
+    }
+    // Use of a phi value without definition.
+    CHECK(false);
+  }
+
+ private:
+  Map map_;
+  DISALLOW_COPY_AND_ASSIGN(OperandMap);
+};
+
+}  // namespace
+
+
+class RegisterAllocatorVerifier::BlockMaps {
+ public:
+  BlockMaps(Zone* zone, const InstructionSequence* sequence)
+      : zone_(zone),
+        sequence_(sequence),
+        phi_map_guard_(sequence->VirtualRegisterCount(), zone),
+        phi_map_(zone),
+        incoming_maps_(zone),
+        outgoing_maps_(zone) {
+    InitializePhis();
+    InitializeOperandMaps();
+  }
+
+  bool IsPhi(int virtual_register) {
+    return phi_map_guard_.Contains(virtual_register);
+  }
+
+  const PhiData* GetPhi(int virtual_register) {
+    auto it = phi_map_.find(virtual_register);
+    CHECK(it != phi_map_.end());
+    return it->second;
+  }
+
+  OperandMap* InitializeIncoming(size_t block_index, bool initial_pass) {
+    return initial_pass ? InitializeFromFirstPredecessor(block_index)
+                        : InitializeFromIntersection(block_index);
+  }
+
+  void PropagateUsesBackwards() {
+    typedef std::set<size_t, std::greater<size_t>, zone_allocator<size_t>>
+        BlockIds;
+    BlockIds block_ids((BlockIds::key_compare()),
+                       zone_allocator<size_t>(zone()));
+    // First ensure that incoming contains only keys in all predecessors.
+    for (auto block : sequence()->instruction_blocks()) {
+      size_t index = block->rpo_number().ToSize();
+      block_ids.insert(index);
+      auto& succ_map = incoming_maps_[index]->map();
+      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
+        auto pred_rpo = block->predecessors()[i];
+        succ_map.Intersect(outgoing_maps_[pred_rpo.ToSize()]->map());
+      }
+    }
+    // Back propagation fixpoint.
+    while (!block_ids.empty()) {
+      // Pop highest block_id.
+      auto block_id_it = block_ids.begin();
+      const size_t succ_index = *block_id_it;
+      block_ids.erase(block_id_it);
+      // Propagate uses back to their definition blocks using succ_vreg.
+      auto block = sequence()->instruction_blocks()[succ_index];
+      auto& succ_map = incoming_maps_[succ_index]->map();
+      for (size_t i = 0; i < block->PredecessorCount(); ++i) {
+        for (auto& succ_val : succ_map) {
+          // An incoming map contains no defines.
+          CHECK_EQ(kInvalidVreg, succ_val.second->define_vreg);
+          // Compute succ_vreg.
+          int succ_vreg = succ_val.second->succ_vreg;
+          if (succ_vreg == kInvalidVreg) {
+            succ_vreg = succ_val.second->use_vreg;
+            // Initialize succ_vreg in back propagation chain.
+            succ_val.second->succ_vreg = succ_vreg;
+          }
+          if (succ_vreg == kInvalidVreg) continue;
+          // May need to transition phi.
+          if (IsPhi(succ_vreg)) {
+            auto phi = GetPhi(succ_vreg);
+            if (phi->definition_rpo.ToSize() == succ_index) {
+              // phi definition block, transition to pred value.
+              succ_vreg = phi->operands[i];
+            }
+          }
+          // Push succ_vreg up to all predecessors.
+          auto pred_rpo = block->predecessors()[i];
+          auto& pred_map = outgoing_maps_[pred_rpo.ToSize()]->map();
+          auto& pred_val = *pred_map.find(succ_val.first);
+          if (pred_val.second->use_vreg != kInvalidVreg) {
+            CHECK_EQ(succ_vreg, pred_val.second->use_vreg);
+          }
+          if (pred_val.second->define_vreg != kInvalidVreg) {
+            CHECK_EQ(succ_vreg, pred_val.second->define_vreg);
+          }
+          if (pred_val.second->succ_vreg != kInvalidVreg) {
+            CHECK_EQ(succ_vreg, pred_val.second->succ_vreg);
+          } else {
+            pred_val.second->succ_vreg = succ_vreg;
+            block_ids.insert(pred_rpo.ToSize());
+          }
+        }
+      }
+    }
+    // Clear uses and back links for second pass.
+    for (auto operand_map : incoming_maps_) {
+      for (auto& succ_val : operand_map->map()) {
+        succ_val.second->incoming = nullptr;
+        succ_val.second->use_vreg = kInvalidVreg;
+      }
+    }
+  }
+
+ private:
+  OperandMap* InitializeFromFirstPredecessor(size_t block_index) {
+    auto to_init = outgoing_maps_[block_index];
+    CHECK(to_init->map().empty());
+    auto block = sequence()->instruction_blocks()[block_index];
+    if (block->predecessors().empty()) return to_init;
     size_t predecessor_index = block->predecessors()[0].ToSize();
+    // Ensure not a backedge.
     CHECK(predecessor_index < block->rpo_number().ToSize());
-    auto* incoming = outgoing_mappings->at(predecessor_index);
-    if (block->PredecessorCount() >= 1) {
-      // Update incoming map with phis. The remaining phis will be checked later
-      // as their mappings are not guaranteed to exist yet.
-      incoming->RunPhis(sequence, block, 0);
-    }
-    // Now initialize outgoing mapping for this block with incoming mapping.
-    CHECK(locations_.empty());
-    locations_ = incoming->locations_;
-  }
-
-  void InitializeFromIntersection() { locations_ = predecessor_intersection_; }
-
-  void InitializeIntersection(const OutgoingMapping* incoming) {
-    CHECK(predecessor_intersection_.empty());
-    predecessor_intersection_ = incoming->locations_;
-  }
-
-  void Intersect(const OutgoingMapping* other) {
-    if (predecessor_intersection_.empty()) return;
-    auto it = predecessor_intersection_.begin();
-    OperandLess less;
-    for (const auto& o : other->locations_) {
-      while (less(it->first, o.first)) {
-        ++it;
-        if (it == predecessor_intersection_.end()) return;
-      }
-      if (it->first->Equals(o.first)) {
-        if (o.second != it->second) {
-          predecessor_intersection_.erase(it++);
-        } else {
-          ++it;
-        }
-        if (it == predecessor_intersection_.end()) return;
-      }
-    }
-  }
-
- private:
-  LocationMap locations_;
-  LocationMap predecessor_intersection_;
-
-  DISALLOW_COPY_AND_ASSIGN(OutgoingMapping);
+    auto incoming = outgoing_maps_[predecessor_index];
+    // Copy map and replace values.
+    to_init->map() = incoming->map();
+    for (auto& it : to_init->map()) {
+      auto incoming = it.second;
+      it.second = new (zone()) OperandMap::MapValue();
+      it.second->incoming = incoming;
+    }
+    // Copy to incoming map for second pass.
+    incoming_maps_[block_index]->map() = to_init->map();
+    return to_init;
+  }
+
+  OperandMap* InitializeFromIntersection(size_t block_index) {
+    return incoming_maps_[block_index];
+  }
+
+  void InitializeOperandMaps() {
+    size_t block_count = sequence()->instruction_blocks().size();
+    incoming_maps_.reserve(block_count);
+    outgoing_maps_.reserve(block_count);
+    for (size_t i = 0; i < block_count; ++i) {
+      incoming_maps_.push_back(new (zone()) OperandMap(zone()));
+      outgoing_maps_.push_back(new (zone()) OperandMap(zone()));
+    }
+  }
+
+  void InitializePhis() {
+    const size_t block_count = sequence()->instruction_blocks().size();
+    for (size_t block_index = 0; block_index < block_count; ++block_index) {
+      const auto block = sequence()->instruction_blocks()[block_index];
+      for (auto phi : block->phis()) {
+        int first_pred_vreg = phi->operands()[0];
+        const PhiData* first_pred_phi = nullptr;
+        if (IsPhi(first_pred_vreg)) {
+          first_pred_phi = GetPhi(first_pred_vreg);
+          first_pred_vreg = first_pred_phi->first_pred_vreg;
+        }
+        CHECK(!IsPhi(first_pred_vreg));
+        auto phi_data = new (zone()) PhiData(
+            block->rpo_number(), phi, first_pred_vreg, first_pred_phi, zone());
+        auto res =
+            phi_map_.insert(std::make_pair(phi->virtual_register(), phi_data));
+        CHECK(res.second);
+        phi_map_guard_.Add(phi->virtual_register());
+      }
+    }
+  }
+
+  typedef ZoneVector<OperandMap*> OperandMaps;
+  typedef ZoneVector<PhiData*> PhiVector;
+
+  Zone* zone() const { return zone_; }
+  const InstructionSequence* sequence() const { return sequence_; }
+
+  Zone* const zone_;
+  const InstructionSequence* const sequence_;
+  BitVector phi_map_guard_;
+  PhiMap phi_map_;
+  OperandMaps incoming_maps_;
+  OperandMaps outgoing_maps_;
 };
 
 
-// Verify that all gap moves move the operands for a virtual register into the
-// correct location for every instruction.
 void RegisterAllocatorVerifier::VerifyGapMoves() {
-  typedef ZoneVector<OutgoingMapping*> OutgoingMappings;
-  OutgoingMappings outgoing_mappings(
-      static_cast<int>(sequence()->instruction_blocks().size()), nullptr,
-      zone());
-  // Construct all mappings, ignoring back edges and multiple entries.
-  ConstructOutgoingMappings(&outgoing_mappings, true);
-  // Run all remaining phis and compute the intersection of all predecessor
-  // mappings.
-  for (const auto* block : sequence()->instruction_blocks()) {
-    if (block->PredecessorCount() == 0) continue;
-    const size_t block_index = block->rpo_number().ToSize();
-    auto* mapping = outgoing_mappings[block_index];
-    bool initialized = false;
-    // Walk predecessors in reverse to ensure Intersect is correctly working.
-    // If it did nothing, the second pass would do exactly what the first pass
-    // did.
-    for (size_t phi_input = block->PredecessorCount() - 1; true; --phi_input) {
-      const size_t pred_block_index = block->predecessors()[phi_input].ToSize();
-      auto* incoming = outgoing_mappings[pred_block_index];
-      if (phi_input != 0) incoming->RunPhis(sequence(), block, phi_input);
-      if (!initialized) {
-        mapping->InitializeIntersection(incoming);
-        initialized = true;
-      } else {
-        mapping->Intersect(incoming);
-      }
-      if (phi_input == 0) break;
-    }
-  }
-  // Construct all mappings again, this time using the instersection mapping
-  // above as the incoming mapping instead of the result from the first
-  // predecessor.
-  ConstructOutgoingMappings(&outgoing_mappings, false);
+  BlockMaps block_maps(zone(), sequence());
+  VerifyGapMoves(&block_maps, true);
+  block_maps.PropagateUsesBackwards();
+  VerifyGapMoves(&block_maps, false);
 }
 
 
-void RegisterAllocatorVerifier::ConstructOutgoingMappings(
-    OutgoingMappings* outgoing_mappings, bool initial_pass) {
-  // Compute the locations of all virtual registers leaving every block, using
-  // only the first predecessor as source for the input mapping.
-  for (const auto* block : sequence()->instruction_blocks()) {
-    const size_t block_index = block->rpo_number().ToSize();
-    auto* current = outgoing_mappings->at(block_index);
-    CHECK(initial_pass == (current == nullptr));
-    // Initialize current.
-    if (!initial_pass) {
-      // Skip check second time around for blocks without multiple predecessors
-      // as we have already executed this in the initial run.
-      if (block->PredecessorCount() <= 1) continue;
-      current->InitializeFromIntersection();
-    } else {
-      current = new (zone()) OutgoingMapping(zone());
-      outgoing_mappings->at(block_index) = current;
-      // Copy outgoing values from predecessor block.
-      current->InitializeFromFirstPredecessor(sequence(), outgoing_mappings,
-                                              block);
-    }
-    // Update current with gaps and operands for all instructions in block.
+// Compute and verify outgoing values for every block.
+void RegisterAllocatorVerifier::VerifyGapMoves(BlockMaps* block_maps,
+                                               bool initial_pass) {
+  const size_t block_count = sequence()->instruction_blocks().size();
+  for (size_t block_index = 0; block_index < block_count; ++block_index) {
+    auto current = block_maps->InitializeIncoming(block_index, initial_pass);
+    const auto block = sequence()->instruction_blocks()[block_index];
     for (int instr_index = block->code_start(); instr_index < block->code_end();
          ++instr_index) {
       const auto& instr_constraint = constraints_[instr_index];
-      const auto* instr = instr_constraint.instruction_;
-      const auto* op_constraints = instr_constraint.operand_constraints_;
+      const auto instr = instr_constraint.instruction_;
+      if (instr->IsSourcePosition()) continue;
+      if (instr->IsGapMoves()) {
+        current->RunGapInstruction(zone(), GapInstruction::cast(instr));
+        continue;
+      }
+      const auto op_constraints = instr_constraint.operand_constraints_;
       size_t count = 0;
       for (size_t i = 0; i < instr->InputCount(); ++i, ++count) {
         if (op_constraints[count].type_ == kImmediate) continue;
-        auto it = current->locations()->find(instr->InputAt(i));
         int virtual_register = op_constraints[count].virtual_register_;
-        CHECK(it != current->locations()->end());
-        CHECK_EQ(it->second, virtual_register);
+        auto op = instr->InputAt(i);
+        if (!block_maps->IsPhi(virtual_register)) {
+          current->Use(op, virtual_register, initial_pass);
+        } else {
+          auto phi = block_maps->GetPhi(virtual_register);
+          current->UsePhi(op, phi, initial_pass);
+        }
       }
       for (size_t i = 0; i < instr->TempCount(); ++i, ++count) {
         current->Drop(instr->TempAt(i));
       }
       if (instr->IsCall()) {
         current->DropRegisters(config());
      }
       for (size_t i = 0; i < instr->OutputCount(); ++i, ++count) {
-        current->Drop(instr->OutputAt(i));
         int virtual_register = op_constraints[count].virtual_register_;
-        current->Map(instr->OutputAt(i), virtual_register);
-      }
-      if (instr->IsGapMoves()) {
-        const auto* gap = GapInstruction::cast(instr);
-        current->RunGapInstruction(zone(), gap);
+        current->Define(zone(), instr->OutputAt(i), virtual_register);
       }
     }
   }
 }
 
 }  // namespace compiler
 }  // namespace internal
 }  // namespace v8
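
Note on the intersection step: OperandMap::Map::Intersect in the patch relies on both operand maps being ordered by the same OperandLess comparator, walking the two maps in lockstep and erasing every key of the receiver that is provably absent from the other map. The following sketch is not part of the patch; it substitutes plain std::map<int, int> for the zone-allocated operand maps, but mirrors the same lockstep walk (including leaving keys beyond the last key of the other map untouched, as the code above does).

// Standalone sketch of the lockstep intersection idea (not V8 code).
#include <cassert>
#include <map>

// Drop from `lhs` every key that is provably absent from `rhs`.
void Intersect(std::map<int, int>* lhs, const std::map<int, int>& rhs) {
  if (lhs->empty()) return;
  auto it = lhs->begin();
  for (const auto& o : rhs) {
    // Any lhs key smaller than the current rhs key cannot occur in rhs,
    // because both maps are ordered by the same comparator.
    while (it->first < o.first) {
      it = lhs->erase(it);
      if (it == lhs->end()) return;
    }
    if (it->first == o.first) {
      ++it;  // Key present in both maps: keep the lhs entry.
      if (it == lhs->end()) return;
    }
    // Otherwise o.first < it->first: rhs has a key that lhs lacks; skip it.
  }
}

int main() {
  std::map<int, int> a = {{1, 10}, {2, 20}, {4, 40}, {7, 70}};
  std::map<int, int> b = {{2, 0}, {4, 0}, {5, 0}};
  Intersect(&a, &b);
  // 1 is erased; 2 and 4 survive; 7 survives because, like the code above,
  // the walk never looks past the last rhs key.
  assert(a.size() == 3 && a.count(2) && a.count(4) && a.count(7));
  return 0;
}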
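
Note on the back-propagation step: PropagateUsesBackwards drives a reverse worklist of block ids, highest first, pushing the virtual register a successor needs (succ_vreg) into each predecessor's outgoing entry and re-enqueuing a predecessor whenever that entry is newly filled, until nothing changes. The sketch below is a simplified, standalone illustration of that worklist shape only: it tracks a single operand slot per block, omits the phi transition and the intersection step, and uses plain STL containers instead of zone containers.

// Standalone sketch of the backward use-propagation fixpoint (not V8 code).
#include <cassert>
#include <functional>
#include <set>
#include <vector>

struct Block {
  std::vector<size_t> preds;  // predecessor block ids (no backedges here)
  int use = -1;               // vreg this block itself uses, or -1
  int succ = -1;              // vreg some successor needs; filled by the pass
};

void PropagateUsesBackwards(std::vector<Block>* blocks) {
  // Highest block id first, mirroring the reverse worklist above.
  std::set<size_t, std::greater<size_t>> worklist;
  for (size_t i = 0; i < blocks->size(); ++i) worklist.insert(i);
  while (!worklist.empty()) {
    const size_t id = *worklist.begin();
    worklist.erase(worklist.begin());
    Block& b = (*blocks)[id];
    // The vreg this block must receive from its predecessors.
    const int needed = (b.succ != -1) ? b.succ : b.use;
    if (needed == -1) continue;
    for (size_t pred : b.preds) {
      Block& p = (*blocks)[pred];
      if (p.succ == -1) {
        p.succ = needed;           // Newly learned demand: keep propagating.
        worklist.insert(pred);
      } else {
        assert(p.succ == needed);  // Conflicting demands would be a bug.
      }
    }
  }
}

int main() {
  // Straight-line CFG 0 -> 1 -> 2; only block 2 uses vreg 7.
  std::vector<Block> blocks(3);
  blocks[1].preds = {0};
  blocks[2].preds = {1};
  blocks[2].use = 7;
  PropagateUsesBackwards(&blocks);
  // The demand for vreg 7 reaches every earlier block.
  assert(blocks[0].succ == 7 && blocks[1].succ == 7);
  return 0;
}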