Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(2)

Side by Side Diff: src/lithium-allocator.cc

Issue 17553004: Fix zone usage within LAllocator. (Closed) Base URL: git@github.com:v8/v8.git@master
Patch Set: Rebased on https://codereview.chromium.org/18022002 Created 7 years, 5 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
« no previous file with comments | « src/lithium-allocator.h ('k') | src/lithium-allocator-inl.h » ('j') | no next file with comments »
Toggle Intra-line Diffs ('i') | Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 523 matching lines...) Expand 10 before | Expand all | Expand 10 after
534 AdvanceLastProcessedMarker(a, advance_last_processed_up_to); 534 AdvanceLastProcessedMarker(a, advance_last_processed_up_to);
535 } else { 535 } else {
536 b = b->next(); 536 b = b->next();
537 } 537 }
538 } 538 }
539 return LifetimePosition::Invalid(); 539 return LifetimePosition::Invalid();
540 } 540 }
541 541
542 542
// Constructs an allocator for |graph|, pre-sizing the live-range lists from
// |num_values| (the number of virtual registers known up front).
// NOTE(review): zone_ is constructed from graph->isolate(), i.e. an
// allocator-local zone rather than the graph's zone; allocator-temporary data
// below uses zone(), while longer-lived objects elsewhere in this file use
// chunk()->zone() — confirm the intended lifetime split against the patch.
LAllocator::LAllocator(int num_values, HGraph* graph)
    : zone_(graph->isolate()),
      chunk_(NULL),  // Set later, in Allocate().
      live_in_sets_(graph->blocks()->length(), zone()),
      live_ranges_(num_values * 2, zone()),
      fixed_live_ranges_(NULL),
      fixed_double_live_ranges_(NULL),
      unhandled_live_ranges_(num_values * 2, zone()),
      active_live_ranges_(8, zone()),
      inactive_live_ranges_(8, zone()),
      reusable_slots_(8, zone()),
      next_virtual_register_(num_values),
      first_artificial_register_(num_values),
      mode_(GENERAL_REGISTERS),
      num_registers_(-1),
      graph_(graph),
      has_osr_entry_(false),
      allocation_ok_(true) { }
561 561
562 562
// Pre-fills live_in_sets_ with one NULL entry per basic block; the actual
// BitVectors are created lazily as liveness is computed.
void LAllocator::InitializeLivenessAnalysis() {
  // Initialize the live_in sets for each block to NULL.
  int block_count = graph_->blocks()->length();
  live_in_sets_.Initialize(block_count, zone());
  live_in_sets_.AddBlock(NULL, block_count, zone());
}
569 569
570 570
571 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) { 571 BitVector* LAllocator::ComputeLiveOut(HBasicBlock* block) {
572 // Compute live out for the given block, except not including backward 572 // Compute live out for the given block, except not including backward
573 // successor edges. 573 // successor edges.
574 BitVector* live_out = new(zone_) BitVector(next_virtual_register_, zone_); 574 BitVector* live_out = new(zone()) BitVector(next_virtual_register_, zone());
575 575
576 // Process all successor blocks. 576 // Process all successor blocks.
577 for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) { 577 for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
578 // Add values live on entry to the successor. Note the successor's 578 // Add values live on entry to the successor. Note the successor's
579 // live_in will not be computed yet for backwards edges. 579 // live_in will not be computed yet for backwards edges.
580 HBasicBlock* successor = it.Current(); 580 HBasicBlock* successor = it.Current();
581 BitVector* live_in = live_in_sets_[successor->block_id()]; 581 BitVector* live_in = live_in_sets_[successor->block_id()];
582 if (live_in != NULL) live_out->Union(*live_in); 582 if (live_in != NULL) live_out->Union(*live_in);
583 583
584 // All phi input operands corresponding to this successor edge are live 584 // All phi input operands corresponding to this successor edge are live
(...skipping 17 matching lines...) Expand all
602 // Add an interval that includes the entire block to the live range for 602 // Add an interval that includes the entire block to the live range for
603 // each live_out value. 603 // each live_out value.
604 LifetimePosition start = LifetimePosition::FromInstructionIndex( 604 LifetimePosition start = LifetimePosition::FromInstructionIndex(
605 block->first_instruction_index()); 605 block->first_instruction_index());
606 LifetimePosition end = LifetimePosition::FromInstructionIndex( 606 LifetimePosition end = LifetimePosition::FromInstructionIndex(
607 block->last_instruction_index()).NextInstruction(); 607 block->last_instruction_index()).NextInstruction();
608 BitVector::Iterator iterator(live_out); 608 BitVector::Iterator iterator(live_out);
609 while (!iterator.Done()) { 609 while (!iterator.Done()) {
610 int operand_index = iterator.Current(); 610 int operand_index = iterator.Current();
611 LiveRange* range = LiveRangeFor(operand_index); 611 LiveRange* range = LiveRangeFor(operand_index);
612 range->AddUseInterval(start, end, zone_); 612 range->AddUseInterval(start, end, zone());
613 iterator.Advance(); 613 iterator.Advance();
614 } 614 }
615 } 615 }
616 616
617 617
618 int LAllocator::FixedDoubleLiveRangeID(int index) { 618 int LAllocator::FixedDoubleLiveRangeID(int index) {
619 return -index - 1 - Register::kMaxNumAllocatableRegisters; 619 return -index - 1 - Register::kMaxNumAllocatableRegisters;
620 } 620 }
621 621
622 622
(...skipping 10 matching lines...) Expand all
633 } else if (operand->HasFixedDoubleRegisterPolicy()) { 633 } else if (operand->HasFixedDoubleRegisterPolicy()) {
634 int reg_index = operand->fixed_register_index(); 634 int reg_index = operand->fixed_register_index();
635 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index); 635 operand->ConvertTo(LOperand::DOUBLE_REGISTER, reg_index);
636 } else { 636 } else {
637 UNREACHABLE(); 637 UNREACHABLE();
638 } 638 }
639 if (is_tagged) { 639 if (is_tagged) {
640 TraceAlloc("Fixed reg is tagged at %d\n", pos); 640 TraceAlloc("Fixed reg is tagged at %d\n", pos);
641 LInstruction* instr = InstructionAt(pos); 641 LInstruction* instr = InstructionAt(pos);
642 if (instr->HasPointerMap()) { 642 if (instr->HasPointerMap()) {
643 instr->pointer_map()->RecordPointer(operand, zone()); 643 instr->pointer_map()->RecordPointer(operand, chunk()->zone());
644 } 644 }
645 } 645 }
646 return operand; 646 return operand;
647 } 647 }
648 648
649 649
// Returns the fixed live range for general-purpose register |index|,
// creating and caching it on first request.
// NOTE(review): the LiveRange object itself is placed in the allocator's
// zone(), but its internal storage zone is chunk()->zone() — confirm that
// this matches the intended object lifetimes.
LiveRange* LAllocator::FixedLiveRangeFor(int index) {
  ASSERT(index < Register::kMaxNumAllocatableRegisters);
  LiveRange* result = fixed_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedLiveRangeID(index), chunk()->zone());
    ASSERT(result->IsFixed());
    SetLiveRangeAssignedRegister(result, index, GENERAL_REGISTERS);
    fixed_live_ranges_[index] = result;
  }
  return result;
}
661 661
662 662
// Returns the fixed live range for double register |index|, creating and
// caching it on first request. Mirrors FixedLiveRangeFor but keyed by
// FixedDoubleLiveRangeID and tagged DOUBLE_REGISTERS.
LiveRange* LAllocator::FixedDoubleLiveRangeFor(int index) {
  ASSERT(index < DoubleRegister::NumAllocatableRegisters());
  LiveRange* result = fixed_double_live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(FixedDoubleLiveRangeID(index),
                                   chunk()->zone());
    ASSERT(result->IsFixed());
    SetLiveRangeAssignedRegister(result, index, DOUBLE_REGISTERS);
    fixed_double_live_ranges_[index] = result;
  }
  return result;
}
674 675
675 676
// Returns the live range for virtual register |index|, creating it on
// demand. live_ranges_ is grown with NULL placeholders when |index| lies
// beyond its current length, so lookups for any vreg id are safe.
LiveRange* LAllocator::LiveRangeFor(int index) {
  if (index >= live_ranges_.length()) {
    live_ranges_.AddBlock(NULL, index - live_ranges_.length() + 1, zone());
  }
  LiveRange* result = live_ranges_[index];
  if (result == NULL) {
    result = new(zone()) LiveRange(index, chunk()->zone());
    live_ranges_[index] = result;
  }
  return result;
}
687 688
688 689
689 LGap* LAllocator::GetLastGap(HBasicBlock* block) { 690 LGap* LAllocator::GetLastGap(HBasicBlock* block) {
690 int last_instruction = block->last_instruction_index(); 691 int last_instruction = block->last_instruction_index();
691 int index = chunk_->NearestGapPos(last_instruction); 692 int index = chunk_->NearestGapPos(last_instruction);
692 return GapAt(index); 693 return GapAt(index);
(...skipping 25 matching lines...) Expand all
718 719
719 720
// Records a definition of |operand| at |position|. If the range already
// starts at or before |position| it is shortened to begin there; otherwise a
// minimal one-instruction interval is created (a definition with no use).
// |hint| is attached to the use position for unallocated operands.
void LAllocator::Define(LifetimePosition position,
                        LOperand* operand,
                        LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;

  if (range->IsEmpty() || range->Start().Value() > position.Value()) {
    // Can happen if there is a definition without use.
    range->AddUseInterval(position, position.NextInstruction(), zone());
    range->AddUsePosition(position.NextInstruction(), NULL, NULL, zone());
  } else {
    range->ShortenTo(position);
  }

  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
}
739 740
740 741
// Records a use of |operand| at |position| and extends its live range back
// to |block_start|. For unallocated operands a use position (with |hint|)
// is also registered.
void LAllocator::Use(LifetimePosition block_start,
                     LifetimePosition position,
                     LOperand* operand,
                     LOperand* hint) {
  LiveRange* range = LiveRangeFor(operand);
  if (range == NULL) return;
  if (operand->IsUnallocated()) {
    LUnallocated* unalloc_operand = LUnallocated::cast(operand);
    range->AddUsePosition(position, unalloc_operand, hint, zone());
  }
  range->AddUseInterval(block_start, position, zone());
}
753 754
754 755
// Adds a move |from| -> |to| to the START parallel move of the gap at
// |index|. If |from| is unallocated and some pending move in that gap
// already writes the same virtual register, the new move copies from that
// move's source instead of from |from|. Parallel moves and move operands are
// allocated in chunk()->zone() since they belong to the emitted chunk.
void LAllocator::AddConstraintsGapMove(int index,
                                       LOperand* from,
                                       LOperand* to) {
  LGap* gap = GapAt(index);
  LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
                                                     chunk()->zone());
  if (from->IsUnallocated()) {
    const ZoneList<LMoveOperands>* move_operands = move->move_operands();
    for (int i = 0; i < move_operands->length(); ++i) {
      LMoveOperands cur = move_operands->at(i);
      LOperand* cur_to = cur.destination();
      if (cur_to->IsUnallocated()) {
        if (LUnallocated::cast(cur_to)->virtual_register() ==
            LUnallocated::cast(from)->virtual_register()) {
          move->AddMove(cur.source(), to, chunk()->zone());
          return;
        }
      }
    }
  }
  move->AddMove(from, to, chunk()->zone());
}
776 778
777 779
778 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) { 780 void LAllocator::MeetRegisterConstraints(HBasicBlock* block) {
779 int start = block->first_instruction_index(); 781 int start = block->first_instruction_index();
780 int end = block->last_instruction_index(); 782 int end = block->last_instruction_index();
781 if (start == -1) return; 783 if (start == -1) return;
782 for (int i = start; i <= end; ++i) { 784 for (int i = start; i <= end; ++i) {
783 if (IsGapAt(i)) { 785 if (IsGapAt(i)) {
784 LInstruction* instr = NULL; 786 LInstruction* instr = NULL;
(...skipping 19 matching lines...) Expand all
804 } 806 }
805 } 807 }
806 } 808 }
807 809
808 // Handle fixed output operand. 810 // Handle fixed output operand.
809 if (first != NULL && first->Output() != NULL) { 811 if (first != NULL && first->Output() != NULL) {
810 LUnallocated* first_output = LUnallocated::cast(first->Output()); 812 LUnallocated* first_output = LUnallocated::cast(first->Output());
811 LiveRange* range = LiveRangeFor(first_output->virtual_register()); 813 LiveRange* range = LiveRangeFor(first_output->virtual_register());
812 bool assigned = false; 814 bool assigned = false;
813 if (first_output->HasFixedPolicy()) { 815 if (first_output->HasFixedPolicy()) {
814 LUnallocated* output_copy = first_output->CopyUnconstrained(zone()); 816 LUnallocated* output_copy = first_output->CopyUnconstrained(
817 chunk()->zone());
815 bool is_tagged = HasTaggedValue(first_output->virtual_register()); 818 bool is_tagged = HasTaggedValue(first_output->virtual_register());
816 AllocateFixed(first_output, gap_index, is_tagged); 819 AllocateFixed(first_output, gap_index, is_tagged);
817 820
818 // This value is produced on the stack, we never need to spill it. 821 // This value is produced on the stack, we never need to spill it.
819 if (first_output->IsStackSlot()) { 822 if (first_output->IsStackSlot()) {
820 range->SetSpillOperand(first_output); 823 range->SetSpillOperand(first_output);
821 range->SetSpillStartIndex(gap_index - 1); 824 range->SetSpillStartIndex(gap_index - 1);
822 assigned = true; 825 assigned = true;
823 } 826 }
824 chunk_->AddGapMove(gap_index, first_output, output_copy); 827 chunk_->AddGapMove(gap_index, first_output, output_copy);
825 } 828 }
826 829
827 if (!assigned) { 830 if (!assigned) {
828 range->SetSpillStartIndex(gap_index); 831 range->SetSpillStartIndex(gap_index);
829 832
830 // This move to spill operand is not a real use. Liveness analysis 833 // This move to spill operand is not a real use. Liveness analysis
831 // and splitting of live ranges do not account for it. 834 // and splitting of live ranges do not account for it.
832 // Thus it should be inserted to a lifetime position corresponding to 835 // Thus it should be inserted to a lifetime position corresponding to
833 // the instruction end. 836 // the instruction end.
834 LGap* gap = GapAt(gap_index); 837 LGap* gap = GapAt(gap_index);
835 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE, zone()); 838 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::BEFORE,
836 move->AddMove(first_output, range->GetSpillOperand(), zone()); 839 chunk()->zone());
840 move->AddMove(first_output, range->GetSpillOperand(),
841 chunk()->zone());
837 } 842 }
838 } 843 }
839 844
840 // Handle fixed input operands of second instruction. 845 // Handle fixed input operands of second instruction.
841 if (second != NULL) { 846 if (second != NULL) {
842 for (UseIterator it(second); !it.Done(); it.Advance()) { 847 for (UseIterator it(second); !it.Done(); it.Advance()) {
843 LUnallocated* cur_input = LUnallocated::cast(it.Current()); 848 LUnallocated* cur_input = LUnallocated::cast(it.Current());
844 if (cur_input->HasFixedPolicy()) { 849 if (cur_input->HasFixedPolicy()) {
845 LUnallocated* input_copy = cur_input->CopyUnconstrained(zone()); 850 LUnallocated* input_copy = cur_input->CopyUnconstrained(
851 chunk()->zone());
846 bool is_tagged = HasTaggedValue(cur_input->virtual_register()); 852 bool is_tagged = HasTaggedValue(cur_input->virtual_register());
847 AllocateFixed(cur_input, gap_index + 1, is_tagged); 853 AllocateFixed(cur_input, gap_index + 1, is_tagged);
848 AddConstraintsGapMove(gap_index, input_copy, cur_input); 854 AddConstraintsGapMove(gap_index, input_copy, cur_input);
849 } else if (cur_input->HasWritableRegisterPolicy()) { 855 } else if (cur_input->HasWritableRegisterPolicy()) {
850 // The live range of writable input registers always goes until the end 856 // The live range of writable input registers always goes until the end
851 // of the instruction. 857 // of the instruction.
852 ASSERT(!cur_input->IsUsedAtStart()); 858 ASSERT(!cur_input->IsUsedAtStart());
853 859
854 LUnallocated* input_copy = cur_input->CopyUnconstrained(zone()); 860 LUnallocated* input_copy = cur_input->CopyUnconstrained(
861 chunk()->zone());
855 int vreg = GetVirtualRegister(); 862 int vreg = GetVirtualRegister();
856 if (!AllocationOk()) return; 863 if (!AllocationOk()) return;
857 cur_input->set_virtual_register(vreg); 864 cur_input->set_virtual_register(vreg);
858 865
859 if (RequiredRegisterKind(input_copy->virtual_register()) == 866 if (RequiredRegisterKind(input_copy->virtual_register()) ==
860 DOUBLE_REGISTERS) { 867 DOUBLE_REGISTERS) {
861 double_artificial_registers_.Add( 868 double_artificial_registers_.Add(
862 cur_input->virtual_register() - first_artificial_register_, 869 cur_input->virtual_register() - first_artificial_register_,
863 zone_); 870 zone());
864 } 871 }
865 872
866 AddConstraintsGapMove(gap_index, input_copy, cur_input); 873 AddConstraintsGapMove(gap_index, input_copy, cur_input);
867 } 874 }
868 } 875 }
869 } 876 }
870 877
871 // Handle "output same as input" for second instruction. 878 // Handle "output same as input" for second instruction.
872 if (second != NULL && second->Output() != NULL) { 879 if (second != NULL && second->Output() != NULL) {
873 LUnallocated* second_output = LUnallocated::cast(second->Output()); 880 LUnallocated* second_output = LUnallocated::cast(second->Output());
874 if (second_output->HasSameAsInputPolicy()) { 881 if (second_output->HasSameAsInputPolicy()) {
875 LUnallocated* cur_input = LUnallocated::cast(second->FirstInput()); 882 LUnallocated* cur_input = LUnallocated::cast(second->FirstInput());
876 int output_vreg = second_output->virtual_register(); 883 int output_vreg = second_output->virtual_register();
877 int input_vreg = cur_input->virtual_register(); 884 int input_vreg = cur_input->virtual_register();
878 885
879 LUnallocated* input_copy = cur_input->CopyUnconstrained(zone()); 886 LUnallocated* input_copy = cur_input->CopyUnconstrained(
887 chunk()->zone());
880 cur_input->set_virtual_register(second_output->virtual_register()); 888 cur_input->set_virtual_register(second_output->virtual_register());
881 AddConstraintsGapMove(gap_index, input_copy, cur_input); 889 AddConstraintsGapMove(gap_index, input_copy, cur_input);
882 890
883 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) { 891 if (HasTaggedValue(input_vreg) && !HasTaggedValue(output_vreg)) {
884 int index = gap_index + 1; 892 int index = gap_index + 1;
885 LInstruction* instr = InstructionAt(index); 893 LInstruction* instr = InstructionAt(index);
886 if (instr->HasPointerMap()) { 894 if (instr->HasPointerMap()) {
887 instr->pointer_map()->RecordPointer(input_copy, zone()); 895 instr->pointer_map()->RecordPointer(input_copy, chunk()->zone());
888 } 896 }
889 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) { 897 } else if (!HasTaggedValue(input_vreg) && HasTaggedValue(output_vreg)) {
890 // The input is assumed to immediately have a tagged representation, 898 // The input is assumed to immediately have a tagged representation,
891 // before the pointer map can be used. I.e. the pointer map at the 899 // before the pointer map can be used. I.e. the pointer map at the
892 // instruction will include the output operand (whose value at the 900 // instruction will include the output operand (whose value at the
893 // beginning of the instruction is equal to the input operand). If 901 // beginning of the instruction is equal to the input operand). If
894 // this is not desired, then the pointer map at this instruction needs 902 // this is not desired, then the pointer map at this instruction needs
895 // to be adjusted manually. 903 // to be adjusted manually.
896 } 904 }
897 } 905 }
898 } 906 }
899 } 907 }
900 908
901 909
902 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) { 910 void LAllocator::ProcessInstructions(HBasicBlock* block, BitVector* live) {
903 int block_start = block->first_instruction_index(); 911 int block_start = block->first_instruction_index();
904 int index = block->last_instruction_index(); 912 int index = block->last_instruction_index();
905 913
906 LifetimePosition block_start_position = 914 LifetimePosition block_start_position =
907 LifetimePosition::FromInstructionIndex(block_start); 915 LifetimePosition::FromInstructionIndex(block_start);
908 916
909 while (index >= block_start) { 917 while (index >= block_start) {
910 LifetimePosition curr_position = 918 LifetimePosition curr_position =
911 LifetimePosition::FromInstructionIndex(index); 919 LifetimePosition::FromInstructionIndex(index);
912 920
913 if (IsGapAt(index)) { 921 if (IsGapAt(index)) {
914 // We have a gap at this position. 922 // We have a gap at this position.
915 LGap* gap = GapAt(index); 923 LGap* gap = GapAt(index);
916 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone()); 924 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
925 chunk()->zone());
917 const ZoneList<LMoveOperands>* move_operands = move->move_operands(); 926 const ZoneList<LMoveOperands>* move_operands = move->move_operands();
918 for (int i = 0; i < move_operands->length(); ++i) { 927 for (int i = 0; i < move_operands->length(); ++i) {
919 LMoveOperands* cur = &move_operands->at(i); 928 LMoveOperands* cur = &move_operands->at(i);
920 if (cur->IsIgnored()) continue; 929 if (cur->IsIgnored()) continue;
921 LOperand* from = cur->source(); 930 LOperand* from = cur->source();
922 LOperand* to = cur->destination(); 931 LOperand* to = cur->destination();
923 HPhi* phi = LookupPhi(to); 932 HPhi* phi = LookupPhi(to);
924 LOperand* hint = to; 933 LOperand* hint = to;
925 if (phi != NULL) { 934 if (phi != NULL) {
926 // This is a phi resolving move. 935 // This is a phi resolving move.
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after
958 Define(curr_position, output, NULL); 967 Define(curr_position, output, NULL);
959 } 968 }
960 969
961 if (instr->ClobbersRegisters()) { 970 if (instr->ClobbersRegisters()) {
962 for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) { 971 for (int i = 0; i < Register::kMaxNumAllocatableRegisters; ++i) {
963 if (output == NULL || !output->IsRegister() || 972 if (output == NULL || !output->IsRegister() ||
964 output->index() != i) { 973 output->index() != i) {
965 LiveRange* range = FixedLiveRangeFor(i); 974 LiveRange* range = FixedLiveRangeFor(i);
966 range->AddUseInterval(curr_position, 975 range->AddUseInterval(curr_position,
967 curr_position.InstructionEnd(), 976 curr_position.InstructionEnd(),
968 zone_); 977 zone());
969 } 978 }
970 } 979 }
971 } 980 }
972 981
973 if (instr->ClobbersDoubleRegisters()) { 982 if (instr->ClobbersDoubleRegisters()) {
974 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) { 983 for (int i = 0; i < DoubleRegister::NumAllocatableRegisters(); ++i) {
975 if (output == NULL || !output->IsDoubleRegister() || 984 if (output == NULL || !output->IsDoubleRegister() ||
976 output->index() != i) { 985 output->index() != i) {
977 LiveRange* range = FixedDoubleLiveRangeFor(i); 986 LiveRange* range = FixedDoubleLiveRangeFor(i);
978 range->AddUseInterval(curr_position, 987 range->AddUseInterval(curr_position,
979 curr_position.InstructionEnd(), 988 curr_position.InstructionEnd(),
980 zone_); 989 zone());
981 } 990 }
982 } 991 }
983 } 992 }
984 993
985 for (UseIterator it(instr); !it.Done(); it.Advance()) { 994 for (UseIterator it(instr); !it.Done(); it.Advance()) {
986 LOperand* input = it.Current(); 995 LOperand* input = it.Current();
987 996
988 LifetimePosition use_pos; 997 LifetimePosition use_pos;
989 if (input->IsUnallocated() && 998 if (input->IsUnallocated() &&
990 LUnallocated::cast(input)->IsUsedAtStart()) { 999 LUnallocated::cast(input)->IsUsedAtStart()) {
(...skipping 27 matching lines...) Expand all
1018 1027
1019 index = index - 1; 1028 index = index - 1;
1020 } 1029 }
1021 } 1030 }
1022 1031
1023 1032
// Lowers the phis of |block|: for each phi an LUnallocated operand carrying
// the phi's virtual register is created, a gap move from every predecessor's
// corresponding input to that operand is added, the operand is recorded in
// the predecessor branch's pointer map where required, and finally the phi
// operand is moved into the phi's spill slot at the block label. Operands
// and moves live in chunk()->zone() since they are part of the chunk.
void LAllocator::ResolvePhis(HBasicBlock* block) {
  const ZoneList<HPhi*>* phis = block->phis();
  for (int i = 0; i < phis->length(); ++i) {
    HPhi* phi = phis->at(i);
    LUnallocated* phi_operand =
        new(chunk()->zone()) LUnallocated(LUnallocated::NONE);
    phi_operand->set_virtual_register(phi->id());
    for (int j = 0; j < phi->OperandCount(); ++j) {
      HValue* op = phi->OperandAt(j);
      LOperand* operand = NULL;
      if (op->IsConstant() && op->EmitAtUses()) {
        HConstant* constant = HConstant::cast(op);
        operand = chunk_->DefineConstantOperand(constant);
      } else {
        ASSERT(!op->EmitAtUses());
        LUnallocated* unalloc =
            new(chunk()->zone()) LUnallocated(LUnallocated::ANY);
        unalloc->set_virtual_register(op->id());
        operand = unalloc;
      }
      HBasicBlock* cur_block = block->predecessors()->at(j);
      // The gap move must be added without any special processing as in
      // the AddConstraintsGapMove.
      chunk_->AddGapMove(cur_block->last_instruction_index() - 1,
                         operand,
                         phi_operand);

      // We are going to insert a move before the branch instruction.
      // Some branch instructions (e.g. loops' back edges)
      // can potentially cause a GC so they have a pointer map.
      // By inserting a move we essentially create a copy of a
      // value which is invisible to PopulatePointerMaps(), because we store
      // it into a location different from the operand of a live range
      // covering a branch instruction.
      // Thus we need to manually record a pointer.
      LInstruction* branch =
          InstructionAt(cur_block->last_instruction_index());
      if (branch->HasPointerMap()) {
        if (phi->representation().IsTagged() && !phi->type().IsSmi()) {
          branch->pointer_map()->RecordPointer(phi_operand, chunk()->zone());
        } else if (!phi->representation().IsDouble()) {
          branch->pointer_map()->RecordUntagged(phi_operand, chunk()->zone());
        }
      }
    }

    // Route the phi result into its spill slot at the start of the block.
    LiveRange* live_range = LiveRangeFor(phi->id());
    LLabel* label = chunk_->GetLabel(phi->block()->block_id());
    label->GetOrCreateParallelMove(LGap::START, chunk()->zone())->
        AddMove(phi_operand, live_range->GetSpillOperand(), chunk()->zone());
    live_range->SetSpillStartIndex(phi->block()->first_instruction_index());
  }
}
1075 1086
1076 1087
1077 bool LAllocator::Allocate(LChunk* chunk) { 1088 bool LAllocator::Allocate(LChunk* chunk) {
1078 ASSERT(chunk_ == NULL); 1089 ASSERT(chunk_ == NULL);
1079 chunk_ = static_cast<LPlatformChunk*>(chunk); 1090 chunk_ = static_cast<LPlatformChunk*>(chunk);
1080 assigned_registers_ = 1091 assigned_registers_ =
1081 new(zone()) BitVector(Register::NumAllocatableRegisters(), zone()); 1092 new(chunk->zone()) BitVector(Register::NumAllocatableRegisters(),
1082 assigned_registers_->Clear(); 1093 chunk->zone());
1083 assigned_double_registers_ = 1094 assigned_double_registers_ =
1084 new(zone()) BitVector(DoubleRegister::NumAllocatableRegisters(), 1095 new(chunk->zone()) BitVector(DoubleRegister::NumAllocatableRegisters(),
1085 zone()); 1096 chunk->zone());
1086 assigned_double_registers_->Clear();
1087 MeetRegisterConstraints(); 1097 MeetRegisterConstraints();
1088 if (!AllocationOk()) return false; 1098 if (!AllocationOk()) return false;
1089 ResolvePhis(); 1099 ResolvePhis();
1090 BuildLiveRanges(); 1100 BuildLiveRanges();
1091 AllocateGeneralRegisters(); 1101 AllocateGeneralRegisters();
1092 if (!AllocationOk()) return false; 1102 if (!AllocationOk()) return false;
1093 AllocateDoubleRegisters(); 1103 AllocateDoubleRegisters();
1094 if (!AllocationOk()) return false; 1104 if (!AllocationOk()) return false;
1095 PopulatePointerMaps(); 1105 PopulatePointerMaps();
1096 ConnectRanges(); 1106 ConnectRanges();
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
1141 if (cur_range->CanCover(pred_end)) { 1151 if (cur_range->CanCover(pred_end)) {
1142 ASSERT(pred_cover == NULL); 1152 ASSERT(pred_cover == NULL);
1143 pred_cover = cur_range; 1153 pred_cover = cur_range;
1144 } 1154 }
1145 cur_range = cur_range->next(); 1155 cur_range = cur_range->next();
1146 } 1156 }
1147 1157
1148 if (cur_cover->IsSpilled()) return; 1158 if (cur_cover->IsSpilled()) return;
1149 ASSERT(pred_cover != NULL && cur_cover != NULL); 1159 ASSERT(pred_cover != NULL && cur_cover != NULL);
1150 if (pred_cover != cur_cover) { 1160 if (pred_cover != cur_cover) {
1151 LOperand* pred_op = pred_cover->CreateAssignedOperand(zone_); 1161 LOperand* pred_op = pred_cover->CreateAssignedOperand(chunk()->zone());
1152 LOperand* cur_op = cur_cover->CreateAssignedOperand(zone_); 1162 LOperand* cur_op = cur_cover->CreateAssignedOperand(chunk()->zone());
1153 if (!pred_op->Equals(cur_op)) { 1163 if (!pred_op->Equals(cur_op)) {
1154 LGap* gap = NULL; 1164 LGap* gap = NULL;
1155 if (block->predecessors()->length() == 1) { 1165 if (block->predecessors()->length() == 1) {
1156 gap = GapAt(block->first_instruction_index()); 1166 gap = GapAt(block->first_instruction_index());
1157 } else { 1167 } else {
1158 ASSERT(pred->end()->SecondSuccessor() == NULL); 1168 ASSERT(pred->end()->SecondSuccessor() == NULL);
1159 gap = GetLastGap(pred); 1169 gap = GetLastGap(pred);
1160 1170
1161 // We are going to insert a move before the branch instruction. 1171 // We are going to insert a move before the branch instruction.
1162 // Some branch instructions (e.g. loops' back edges) 1172 // Some branch instructions (e.g. loops' back edges)
1163 // can potentially cause a GC so they have a pointer map. 1173 // can potentially cause a GC so they have a pointer map.
1164 // By inserting a move we essentially create a copy of a 1174 // By inserting a move we essentially create a copy of a
1165 // value which is invisible to PopulatePointerMaps(), because we store 1175 // value which is invisible to PopulatePointerMaps(), because we store
1166 // it into a location different from the operand of a live range 1176 // it into a location different from the operand of a live range
1167 // covering a branch instruction. 1177 // covering a branch instruction.
1168 // Thus we need to manually record a pointer. 1178 // Thus we need to manually record a pointer.
1169 LInstruction* branch = InstructionAt(pred->last_instruction_index()); 1179 LInstruction* branch = InstructionAt(pred->last_instruction_index());
1170 if (branch->HasPointerMap()) { 1180 if (branch->HasPointerMap()) {
1171 if (HasTaggedValue(range->id())) { 1181 if (HasTaggedValue(range->id())) {
1172 branch->pointer_map()->RecordPointer(cur_op, zone()); 1182 branch->pointer_map()->RecordPointer(cur_op, chunk()->zone());
1173 } else if (!cur_op->IsDoubleStackSlot() && 1183 } else if (!cur_op->IsDoubleStackSlot() &&
1174 !cur_op->IsDoubleRegister()) { 1184 !cur_op->IsDoubleRegister()) {
1175 branch->pointer_map()->RemovePointer(cur_op); 1185 branch->pointer_map()->RemovePointer(cur_op);
1176 } 1186 }
1177 } 1187 }
1178 } 1188 }
1179 gap->GetOrCreateParallelMove( 1189 gap->GetOrCreateParallelMove(
1180 LGap::START, zone())->AddMove(pred_op, cur_op, zone()); 1190 LGap::START, chunk()->zone())->AddMove(pred_op, cur_op,
1191 chunk()->zone());
1181 } 1192 }
1182 } 1193 }
1183 } 1194 }
1184 1195
1185 1196
1186 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) { 1197 LParallelMove* LAllocator::GetConnectingParallelMove(LifetimePosition pos) {
1187 int index = pos.InstructionIndex(); 1198 int index = pos.InstructionIndex();
1188 if (IsGapAt(index)) { 1199 if (IsGapAt(index)) {
1189 LGap* gap = GapAt(index); 1200 LGap* gap = GapAt(index);
1190 return gap->GetOrCreateParallelMove( 1201 return gap->GetOrCreateParallelMove(
1191 pos.IsInstructionStart() ? LGap::START : LGap::END, zone()); 1202 pos.IsInstructionStart() ? LGap::START : LGap::END, chunk()->zone());
1192 } 1203 }
1193 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1); 1204 int gap_pos = pos.IsInstructionStart() ? (index - 1) : (index + 1);
1194 return GapAt(gap_pos)->GetOrCreateParallelMove( 1205 return GapAt(gap_pos)->GetOrCreateParallelMove(
1195 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, zone()); 1206 (gap_pos < index) ? LGap::AFTER : LGap::BEFORE, chunk()->zone());
1196 } 1207 }
1197 1208
1198 1209
1199 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) { 1210 HBasicBlock* LAllocator::GetBlock(LifetimePosition pos) {
1200 LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex())); 1211 LGap* gap = GapAt(chunk_->NearestGapPos(pos.InstructionIndex()));
1201 return gap->block(); 1212 return gap->block();
1202 } 1213 }
1203 1214
1204 1215
1205 void LAllocator::ConnectRanges() { 1216 void LAllocator::ConnectRanges() {
1206 LAllocatorPhase phase("L_Connect ranges", this); 1217 LAllocatorPhase phase("L_Connect ranges", this);
1207 for (int i = 0; i < live_ranges()->length(); ++i) { 1218 for (int i = 0; i < live_ranges()->length(); ++i) {
1208 LiveRange* first_range = live_ranges()->at(i); 1219 LiveRange* first_range = live_ranges()->at(i);
1209 if (first_range == NULL || first_range->parent() != NULL) continue; 1220 if (first_range == NULL || first_range->parent() != NULL) continue;
1210 1221
1211 LiveRange* second_range = first_range->next(); 1222 LiveRange* second_range = first_range->next();
1212 while (second_range != NULL) { 1223 while (second_range != NULL) {
1213 LifetimePosition pos = second_range->Start(); 1224 LifetimePosition pos = second_range->Start();
1214 1225
1215 if (!second_range->IsSpilled()) { 1226 if (!second_range->IsSpilled()) {
1216 // Add gap move if the two live ranges touch and there is no block 1227 // Add gap move if the two live ranges touch and there is no block
1217 // boundary. 1228 // boundary.
1218 if (first_range->End().Value() == pos.Value()) { 1229 if (first_range->End().Value() == pos.Value()) {
1219 bool should_insert = true; 1230 bool should_insert = true;
1220 if (IsBlockBoundary(pos)) { 1231 if (IsBlockBoundary(pos)) {
1221 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos)); 1232 should_insert = CanEagerlyResolveControlFlow(GetBlock(pos));
1222 } 1233 }
1223 if (should_insert) { 1234 if (should_insert) {
1224 LParallelMove* move = GetConnectingParallelMove(pos); 1235 LParallelMove* move = GetConnectingParallelMove(pos);
1225 LOperand* prev_operand = first_range->CreateAssignedOperand(zone_); 1236 LOperand* prev_operand = first_range->CreateAssignedOperand(
1226 LOperand* cur_operand = second_range->CreateAssignedOperand(zone_); 1237 chunk()->zone());
1227 move->AddMove(prev_operand, cur_operand, zone()); 1238 LOperand* cur_operand = second_range->CreateAssignedOperand(
1239 chunk()->zone());
1240 move->AddMove(prev_operand, cur_operand,
1241 chunk()->zone());
1228 } 1242 }
1229 } 1243 }
1230 } 1244 }
1231 1245
1232 first_range = second_range; 1246 first_range = second_range;
1233 second_range = second_range->next(); 1247 second_range = second_range->next();
1234 } 1248 }
1235 } 1249 }
1236 } 1250 }
1237 1251
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after
1282 const ZoneList<HPhi*>* phis = block->phis(); 1296 const ZoneList<HPhi*>* phis = block->phis();
1283 for (int i = 0; i < phis->length(); ++i) { 1297 for (int i = 0; i < phis->length(); ++i) {
1284 // The live range interval already ends at the first instruction of the 1298 // The live range interval already ends at the first instruction of the
1285 // block. 1299 // block.
1286 HPhi* phi = phis->at(i); 1300 HPhi* phi = phis->at(i);
1287 live->Remove(phi->id()); 1301 live->Remove(phi->id());
1288 1302
1289 LOperand* hint = NULL; 1303 LOperand* hint = NULL;
1290 LOperand* phi_operand = NULL; 1304 LOperand* phi_operand = NULL;
1291 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0)); 1305 LGap* gap = GetLastGap(phi->block()->predecessors()->at(0));
1292 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START, zone()); 1306 LParallelMove* move = gap->GetOrCreateParallelMove(LGap::START,
1307 chunk()->zone());
1293 for (int j = 0; j < move->move_operands()->length(); ++j) { 1308 for (int j = 0; j < move->move_operands()->length(); ++j) {
1294 LOperand* to = move->move_operands()->at(j).destination(); 1309 LOperand* to = move->move_operands()->at(j).destination();
1295 if (to->IsUnallocated() && 1310 if (to->IsUnallocated() &&
1296 LUnallocated::cast(to)->virtual_register() == phi->id()) { 1311 LUnallocated::cast(to)->virtual_register() == phi->id()) {
1297 hint = move->move_operands()->at(j).source(); 1312 hint = move->move_operands()->at(j).source();
1298 phi_operand = to; 1313 phi_operand = to;
1299 break; 1314 break;
1300 } 1315 }
1301 } 1316 }
1302 ASSERT(hint != NULL); 1317 ASSERT(hint != NULL);
(...skipping 16 matching lines...) Expand all
1319 // header. 1334 // header.
1320 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge(); 1335 HBasicBlock* back_edge = block->loop_information()->GetLastBackEdge();
1321 BitVector::Iterator iterator(live); 1336 BitVector::Iterator iterator(live);
1322 LifetimePosition start = LifetimePosition::FromInstructionIndex( 1337 LifetimePosition start = LifetimePosition::FromInstructionIndex(
1323 block->first_instruction_index()); 1338 block->first_instruction_index());
1324 LifetimePosition end = LifetimePosition::FromInstructionIndex( 1339 LifetimePosition end = LifetimePosition::FromInstructionIndex(
1325 back_edge->last_instruction_index()).NextInstruction(); 1340 back_edge->last_instruction_index()).NextInstruction();
1326 while (!iterator.Done()) { 1341 while (!iterator.Done()) {
1327 int operand_index = iterator.Current(); 1342 int operand_index = iterator.Current();
1328 LiveRange* range = LiveRangeFor(operand_index); 1343 LiveRange* range = LiveRangeFor(operand_index);
1329 range->EnsureInterval(start, end, zone_); 1344 range->EnsureInterval(start, end, zone());
1330 iterator.Advance(); 1345 iterator.Advance();
1331 } 1346 }
1332 1347
1333 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) { 1348 for (int i = block->block_id() + 1; i <= back_edge->block_id(); ++i) {
1334 live_in_sets_[i]->Union(*live); 1349 live_in_sets_[i]->Union(*live);
1335 } 1350 }
1336 } 1351 }
1337 1352
1338 #ifdef DEBUG 1353 #ifdef DEBUG
1339 if (block_id == 0) { 1354 if (block_id == 0) {
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after
1440 cur = cur->next(); 1455 cur = cur->next();
1441 } 1456 }
1442 if (cur == NULL) continue; 1457 if (cur == NULL) continue;
1443 1458
1444 // Check if the live range is spilled and the safe point is after 1459 // Check if the live range is spilled and the safe point is after
1445 // the spill position. 1460 // the spill position.
1446 if (range->HasAllocatedSpillOperand() && 1461 if (range->HasAllocatedSpillOperand() &&
1447 safe_point >= range->spill_start_index()) { 1462 safe_point >= range->spill_start_index()) {
1448 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n", 1463 TraceAlloc("Pointer for range %d (spilled at %d) at safe point %d\n",
1449 range->id(), range->spill_start_index(), safe_point); 1464 range->id(), range->spill_start_index(), safe_point);
1450 map->RecordPointer(range->GetSpillOperand(), zone()); 1465 map->RecordPointer(range->GetSpillOperand(), chunk()->zone());
1451 } 1466 }
1452 1467
1453 if (!cur->IsSpilled()) { 1468 if (!cur->IsSpilled()) {
1454 TraceAlloc("Pointer in register for range %d (start at %d) " 1469 TraceAlloc("Pointer in register for range %d (start at %d) "
1455 "at safe point %d\n", 1470 "at safe point %d\n",
1456 cur->id(), cur->Start().Value(), safe_point); 1471 cur->id(), cur->Start().Value(), safe_point);
1457 LOperand* operand = cur->CreateAssignedOperand(zone_); 1472 LOperand* operand = cur->CreateAssignedOperand(chunk()->zone());
1458 ASSERT(!operand->IsStackSlot()); 1473 ASSERT(!operand->IsStackSlot());
1459 map->RecordPointer(operand, zone()); 1474 map->RecordPointer(operand, chunk()->zone());
1460 } 1475 }
1461 } 1476 }
1462 } 1477 }
1463 } 1478 }
1464 1479
1465 1480
1466 void LAllocator::AllocateGeneralRegisters() { 1481 void LAllocator::AllocateGeneralRegisters() {
1467 LAllocatorPhase phase("L_Allocate general registers", this); 1482 LAllocatorPhase phase("L_Allocate general registers", this);
1468 num_registers_ = Register::NumAllocatableRegisters(); 1483 num_registers_ = Register::NumAllocatableRegisters();
1469 AllocateRegisters(); 1484 AllocateRegisters();
(...skipping 320 matching lines...) Expand 10 before | Expand all | Expand 10 after
1790 RegisterName(register_index), 1805 RegisterName(register_index),
1791 free_until_pos[register_index].Value(), 1806 free_until_pos[register_index].Value(),
1792 current->id(), 1807 current->id(),
1793 current->End().Value()); 1808 current->End().Value());
1794 1809
1795 // The desired register is free until the end of the current live range. 1810 // The desired register is free until the end of the current live range.
1796 if (free_until_pos[register_index].Value() >= current->End().Value()) { 1811 if (free_until_pos[register_index].Value() >= current->End().Value()) {
1797 TraceAlloc("Assigning preferred reg %s to live range %d\n", 1812 TraceAlloc("Assigning preferred reg %s to live range %d\n",
1798 RegisterName(register_index), 1813 RegisterName(register_index),
1799 current->id()); 1814 current->id());
1800 SetLiveRangeAssignedRegister(current, register_index, mode_, zone_); 1815 SetLiveRangeAssignedRegister(current, register_index, mode_);
1801 return true; 1816 return true;
1802 } 1817 }
1803 } 1818 }
1804 1819
1805 // Find the register which stays free for the longest time. 1820 // Find the register which stays free for the longest time.
1806 int reg = 0; 1821 int reg = 0;
1807 for (int i = 1; i < RegisterCount(); ++i) { 1822 for (int i = 1; i < RegisterCount(); ++i) {
1808 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) { 1823 if (free_until_pos[i].Value() > free_until_pos[reg].Value()) {
1809 reg = i; 1824 reg = i;
1810 } 1825 }
(...skipping 14 matching lines...) Expand all
1825 AddToUnhandledSorted(tail); 1840 AddToUnhandledSorted(tail);
1826 } 1841 }
1827 1842
1828 1843
1829 // Register reg is available at the range start and is free until 1844 // Register reg is available at the range start and is free until
1830 // the range end. 1845 // the range end.
1831 ASSERT(pos.Value() >= current->End().Value()); 1846 ASSERT(pos.Value() >= current->End().Value());
1832 TraceAlloc("Assigning free reg %s to live range %d\n", 1847 TraceAlloc("Assigning free reg %s to live range %d\n",
1833 RegisterName(reg), 1848 RegisterName(reg),
1834 current->id()); 1849 current->id());
1835 SetLiveRangeAssignedRegister(current, reg, mode_, zone_); 1850 SetLiveRangeAssignedRegister(current, reg, mode_);
1836 1851
1837 return true; 1852 return true;
1838 } 1853 }
1839 1854
1840 1855
1841 void LAllocator::AllocateBlockedReg(LiveRange* current) { 1856 void LAllocator::AllocateBlockedReg(LiveRange* current) {
1842 UsePosition* register_use = current->NextRegisterPosition(current->Start()); 1857 UsePosition* register_use = current->NextRegisterPosition(current->Start());
1843 if (register_use == NULL) { 1858 if (register_use == NULL) {
1844 // There is no use in the current live range that requires a register. 1859 // There is no use in the current live range that requires a register.
1845 // We can just spill it. 1860 // We can just spill it.
(...skipping 64 matching lines...) Expand 10 before | Expand all | Expand 10 after
1910 block_pos[reg].InstructionStart()); 1925 block_pos[reg].InstructionStart());
1911 if (!AllocationOk()) return; 1926 if (!AllocationOk()) return;
1912 AddToUnhandledSorted(tail); 1927 AddToUnhandledSorted(tail);
1913 } 1928 }
1914 1929
1915 // Register reg is not blocked for the whole range. 1930 // Register reg is not blocked for the whole range.
1916 ASSERT(block_pos[reg].Value() >= current->End().Value()); 1931 ASSERT(block_pos[reg].Value() >= current->End().Value());
1917 TraceAlloc("Assigning blocked reg %s to live range %d\n", 1932 TraceAlloc("Assigning blocked reg %s to live range %d\n",
1918 RegisterName(reg), 1933 RegisterName(reg),
1919 current->id()); 1934 current->id());
1920 SetLiveRangeAssignedRegister(current, reg, mode_, zone_); 1935 SetLiveRangeAssignedRegister(current, reg, mode_);
1921 1936
1922 // This register was not free. Thus we need to find and spill 1937 // This register was not free. Thus we need to find and spill
1923 // parts of active and inactive live regions that use the same register 1938 // parts of active and inactive live regions that use the same register
1924 // at the same lifetime positions as current. 1939 // at the same lifetime positions as current.
1925 SplitAndSpillIntersecting(current); 1940 SplitAndSpillIntersecting(current);
1926 } 1941 }
1927 1942
1928 1943
1929 LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range, 1944 LifetimePosition LAllocator::FindOptimalSpillingPos(LiveRange* range,
1930 LifetimePosition pos) { 1945 LifetimePosition pos) {
(...skipping 91 matching lines...) Expand 10 before | Expand all | Expand 10 after
2022 if (pos.Value() <= range->Start().Value()) return range; 2037 if (pos.Value() <= range->Start().Value()) return range;
2023 2038
2024 // We can't properly connect liveranges if split occured at the end 2039 // We can't properly connect liveranges if split occured at the end
2025 // of control instruction. 2040 // of control instruction.
2026 ASSERT(pos.IsInstructionStart() || 2041 ASSERT(pos.IsInstructionStart() ||
2027 !chunk_->instructions()->at(pos.InstructionIndex())->IsControl()); 2042 !chunk_->instructions()->at(pos.InstructionIndex())->IsControl());
2028 2043
2029 int vreg = GetVirtualRegister(); 2044 int vreg = GetVirtualRegister();
2030 if (!AllocationOk()) return NULL; 2045 if (!AllocationOk()) return NULL;
2031 LiveRange* result = LiveRangeFor(vreg); 2046 LiveRange* result = LiveRangeFor(vreg);
2032 range->SplitAt(pos, result, zone_); 2047 range->SplitAt(pos, result, zone());
2033 return result; 2048 return result;
2034 } 2049 }
2035 2050
2036 2051
2037 LiveRange* LAllocator::SplitBetween(LiveRange* range, 2052 LiveRange* LAllocator::SplitBetween(LiveRange* range,
2038 LifetimePosition start, 2053 LifetimePosition start,
2039 LifetimePosition end) { 2054 LifetimePosition end) {
2040 ASSERT(!range->IsFixed()); 2055 ASSERT(!range->IsFixed());
2041 TraceAlloc("Splitting live range %d in position between [%d, %d]\n", 2056 TraceAlloc("Splitting live range %d in position between [%d, %d]\n",
2042 range->id(), 2057 range->id(),
(...skipping 87 matching lines...) Expand 10 before | Expand all | Expand 10 after
2130 void LAllocator::Spill(LiveRange* range) { 2145 void LAllocator::Spill(LiveRange* range) {
2131 ASSERT(!range->IsSpilled()); 2146 ASSERT(!range->IsSpilled());
2132 TraceAlloc("Spilling live range %d\n", range->id()); 2147 TraceAlloc("Spilling live range %d\n", range->id());
2133 LiveRange* first = range->TopLevel(); 2148 LiveRange* first = range->TopLevel();
2134 2149
2135 if (!first->HasAllocatedSpillOperand()) { 2150 if (!first->HasAllocatedSpillOperand()) {
2136 LOperand* op = TryReuseSpillSlot(range); 2151 LOperand* op = TryReuseSpillSlot(range);
2137 if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS); 2152 if (op == NULL) op = chunk_->GetNextSpillSlot(mode_ == DOUBLE_REGISTERS);
2138 first->SetSpillOperand(op); 2153 first->SetSpillOperand(op);
2139 } 2154 }
2140 range->MakeSpilled(zone_); 2155 range->MakeSpilled(chunk()->zone());
2141 } 2156 }
2142 2157
2143 2158
2144 int LAllocator::RegisterCount() const { 2159 int LAllocator::RegisterCount() const {
2145 return num_registers_; 2160 return num_registers_;
2146 } 2161 }
2147 2162
2148 2163
2149 #ifdef DEBUG 2164 #ifdef DEBUG
2150 2165
2151 2166
2152 void LAllocator::Verify() const { 2167 void LAllocator::Verify() const {
2153 for (int i = 0; i < live_ranges()->length(); ++i) { 2168 for (int i = 0; i < live_ranges()->length(); ++i) {
2154 LiveRange* current = live_ranges()->at(i); 2169 LiveRange* current = live_ranges()->at(i);
2155 if (current != NULL) current->Verify(); 2170 if (current != NULL) current->Verify();
2156 } 2171 }
2157 } 2172 }
2158 2173
2159 2174
2160 #endif 2175 #endif
2161 2176
2162 2177
2178 LAllocatorPhase::LAllocatorPhase(const char* name, LAllocator* allocator)
2179 : CompilationPhase(name, allocator->graph()->info()),
2180 allocator_(allocator) {
2181 if (FLAG_hydrogen_stats) {
2182 allocator_zone_start_allocation_size_ =
2183 allocator->zone()->allocation_size();
2184 }
2185 }
2186
2187
2163 LAllocatorPhase::~LAllocatorPhase() { 2188 LAllocatorPhase::~LAllocatorPhase() {
2189 if (FLAG_hydrogen_stats) {
2190 unsigned size = allocator_->zone()->allocation_size() -
2191 allocator_zone_start_allocation_size_;
2192 isolate()->GetHStatistics()->SaveTiming(name(), 0, size);
2193 }
2194
2164 if (ShouldProduceTraceOutput()) { 2195 if (ShouldProduceTraceOutput()) {
2165 isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk()); 2196 isolate()->GetHTracer()->TraceLithium(name(), allocator_->chunk());
2166 isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_); 2197 isolate()->GetHTracer()->TraceLiveRanges(name(), allocator_);
2167 } 2198 }
2168 2199
2169 #ifdef DEBUG 2200 #ifdef DEBUG
2170 if (allocator_ != NULL) allocator_->Verify(); 2201 if (allocator_ != NULL) allocator_->Verify();
2171 #endif 2202 #endif
2172 } 2203 }
2173 2204
2174 2205
2175 } } // namespace v8::internal 2206 } } // namespace v8::internal
OLDNEW
« no previous file with comments | « src/lithium-allocator.h ('k') | src/lithium-allocator-inl.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698