Chromium Code Reviews

Unified Diff: src/x64/lithium-codegen-x64.cc

Issue 6366003: Port new version of ParallelMove's LGapResolver to X64. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: '' Created 9 years, 11 months ago
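
This patch deletes the graph-based gap resolver below (LGapNode, CanReach, ResolveCycle) and reduces DoParallelMove to a single call into the ported resolver_. Background for the change: a gap's parallel move must behave as if every source is read before any destination is written, so the moves cannot simply be emitted in list order, and a cyclic move set such as a swap needs a scratch location. The standalone sketch below illustrates that scheduling problem; it is not V8 code, and all names in it (Move, Schedule, kTemp) are invented for the example:

// Illustrative sketch only (not V8 code): ordering a parallel move set
// so that no emitted move clobbers the source of a move emitted later.
// Registers are modeled as ints; kTemp plays the role that the marker
// operand / kScratchRegister / xmm0 play in the code removed below.
#include <cstdio>
#include <string>
#include <vector>

struct Move { int dst, src; };
const int kTemp = -1;  // stands in for a scratch register

std::vector<Move> Schedule(std::vector<Move> pending) {
  std::vector<Move> ordered;
  while (!pending.empty()) {
    bool progress = false;
    for (size_t i = 0; i < pending.size(); ++i) {
      bool blocked = false;  // is pending[i].dst still read by another move?
      for (size_t j = 0; j < pending.size(); ++j) {
        if (j != i && pending[j].src == pending[i].dst) blocked = true;
      }
      if (!blocked) {
        ordered.push_back(pending[i]);  // safe to emit now
        pending.erase(pending.begin() + i);
        progress = true;
        break;
      }
    }
    if (!progress) {
      // Every remaining destination is still needed as a source, so the
      // remaining moves form cycles. Break one: save a source in the
      // scratch and let its move read the scratch instead.
      ordered.push_back(Move{kTemp, pending[0].src});
      pending[0].src = kTemp;
    }
  }
  return ordered;
}

int main() {
  // A swap {r0 <- r1, r1 <- r0} cannot be ordered without a scratch.
  for (const Move& m : Schedule({{0, 1}, {1, 0}})) {
    auto name = [](int r) {
      return r == kTemp ? std::string("temp") : "r" + std::to_string(r);
    };
    std::printf("%s <- %s\n", name(m.dst).c_str(), name(m.src).c_str());
  }
  return 0;  // prints: temp <- r1; r1 <- r0; r0 <- temp
}

The resolver being deleted reaches the same result differently: RegisterMove builds an assigned_from graph, CanReach detects when a new edge would close a cycle, and ResolveCycle rotates each cycle through the marker operand. Note also that the old DoParallelMove walks the result list backwards, which is why RegisterMove adds constant moves first so that they are emitted last.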
 // Copyright 2011 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
 //
 //     * Redistributions of source code must retain the above copyright
 //       notice, this list of conditions and the following disclaimer.
 //     * Redistributions in binary form must reproduce the above
 //       copyright notice, this list of conditions and the following
 //       disclaimer in the documentation and/or other materials provided
(...skipping 19 matching lines...)
 #if defined(V8_TARGET_ARCH_X64)

 #include "x64/lithium-codegen-x64.h"
 #include "code-stubs.h"
 #include "stub-cache.h"

 namespace v8 {
 namespace internal {


-class LGapNode: public ZoneObject {
- public:
-  explicit LGapNode(LOperand* operand)
-      : operand_(operand), resolved_(false), visited_id_(-1) { }
-
-  LOperand* operand() const { return operand_; }
-  bool IsResolved() const { return !IsAssigned() || resolved_; }
-  void MarkResolved() {
-    ASSERT(!IsResolved());
-    resolved_ = true;
-  }
-  int visited_id() const { return visited_id_; }
-  void set_visited_id(int id) {
-    ASSERT(id > visited_id_);
-    visited_id_ = id;
-  }
-
-  bool IsAssigned() const { return assigned_from_.is_set(); }
-  LGapNode* assigned_from() const { return assigned_from_.get(); }
-  void set_assigned_from(LGapNode* n) { assigned_from_.set(n); }
-
- private:
-  LOperand* operand_;
-  SetOncePointer<LGapNode> assigned_from_;
-  bool resolved_;
-  int visited_id_;
-};
-
-
-LGapResolver::LGapResolver()
-    : nodes_(32),
-      identified_cycles_(4),
-      result_(16),
-      next_visited_id_(0) {
-}
-
-
-const ZoneList<LMoveOperands>* LGapResolver::Resolve(
-    const ZoneList<LMoveOperands>* moves,
-    LOperand* marker_operand) {
-  nodes_.Rewind(0);
-  identified_cycles_.Rewind(0);
-  result_.Rewind(0);
-  next_visited_id_ = 0;
-
-  for (int i = 0; i < moves->length(); ++i) {
-    LMoveOperands move = moves->at(i);
-    if (!move.IsRedundant()) RegisterMove(move);
-  }
-
-  for (int i = 0; i < identified_cycles_.length(); ++i) {
-    ResolveCycle(identified_cycles_[i], marker_operand);
-  }
-
-  int unresolved_nodes;
-  do {
-    unresolved_nodes = 0;
-    for (int j = 0; j < nodes_.length(); j++) {
-      LGapNode* node = nodes_[j];
-      if (!node->IsResolved() && node->assigned_from()->IsResolved()) {
-        AddResultMove(node->assigned_from(), node);
-        node->MarkResolved();
-      }
-      if (!node->IsResolved()) ++unresolved_nodes;
-    }
-  } while (unresolved_nodes > 0);
-  return &result_;
-}
-
-
-void LGapResolver::AddResultMove(LGapNode* from, LGapNode* to) {
-  AddResultMove(from->operand(), to->operand());
-}
-
-
-void LGapResolver::AddResultMove(LOperand* from, LOperand* to) {
-  result_.Add(LMoveOperands(from, to));
-}
-
-
-void LGapResolver::ResolveCycle(LGapNode* start, LOperand* marker_operand) {
-  ZoneList<LOperand*> cycle_operands(8);
-  cycle_operands.Add(marker_operand);
-  LGapNode* cur = start;
-  do {
-    cur->MarkResolved();
-    cycle_operands.Add(cur->operand());
-    cur = cur->assigned_from();
-  } while (cur != start);
-  cycle_operands.Add(marker_operand);
-
-  for (int i = cycle_operands.length() - 1; i > 0; --i) {
-    LOperand* from = cycle_operands[i];
-    LOperand* to = cycle_operands[i - 1];
-    AddResultMove(from, to);
-  }
-}
-
-
-bool LGapResolver::CanReach(LGapNode* a, LGapNode* b, int visited_id) {
-  ASSERT(a != b);
-  LGapNode* cur = a;
-  while (cur != b && cur->visited_id() != visited_id && cur->IsAssigned()) {
-    cur->set_visited_id(visited_id);
-    cur = cur->assigned_from();
-  }
-
-  return cur == b;
-}
-
-
-bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) {
-  ASSERT(a != b);
-  return CanReach(a, b, next_visited_id_++);
-}
-
-
-void LGapResolver::RegisterMove(LMoveOperands move) {
-  if (move.source()->IsConstantOperand()) {
-    // Constant moves should be last in the machine code. Therefore add them
-    // first to the result set.
-    AddResultMove(move.source(), move.destination());
-  } else {
-    LGapNode* from = LookupNode(move.source());
-    LGapNode* to = LookupNode(move.destination());
-    if (to->IsAssigned() && to->assigned_from() == from) {
-      move.Eliminate();
-      return;
-    }
-    ASSERT(!to->IsAssigned());
-    if (CanReach(from, to)) {
-      // This introduces a cycle. Save.
-      identified_cycles_.Add(from);
-    }
-    to->set_assigned_from(from);
-  }
-}
-
-
-LGapNode* LGapResolver::LookupNode(LOperand* operand) {
-  for (int i = 0; i < nodes_.length(); ++i) {
-    if (nodes_[i]->operand()->Equals(operand)) return nodes_[i];
-  }
-
-  // No node found => create a new one.
-  LGapNode* result = new LGapNode(operand);
-  nodes_.Add(result);
-  return result;
-}
-
-
 #define __ masm()->

 bool LCodeGen::GenerateCode() {
   HPhase phase("Code generation", chunk());
   ASSERT(is_unused());
   status_ = GENERATING;
   return GeneratePrologue() &&
          GenerateBody() &&
          GenerateDeferredCode() &&
          GenerateSafepointTable();
(...skipping 474 matching lines...)
   } else {
     Comment(";;; B%d", label->block_id());
   }
   __ bind(label->label());
   current_block_ = label->block_id();
   LCodeGen::DoGap(label);
 }


 void LCodeGen::DoParallelMove(LParallelMove* move) {
-  // xmm0 must always be a scratch register.
-  XMMRegister xmm_scratch = xmm0;
-  LUnallocated marker_operand(LUnallocated::NONE);
-
-  Register cpu_scratch = kScratchRegister;
-
-  const ZoneList<LMoveOperands>* moves =
-      resolver_.Resolve(move->move_operands(), &marker_operand);
-  for (int i = moves->length() - 1; i >= 0; --i) {
-    LMoveOperands move = moves->at(i);
-    LOperand* from = move.source();
-    LOperand* to = move.destination();
-    ASSERT(!from->IsDoubleRegister() ||
-           !ToDoubleRegister(from).is(xmm_scratch));
-    ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch));
-    ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch));
-    ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch));
-    if (from->IsConstantOperand()) {
-      LConstantOperand* constant_from = LConstantOperand::cast(from);
-      if (to->IsRegister()) {
-        if (IsInteger32Constant(constant_from)) {
-          __ movl(ToRegister(to), Immediate(ToInteger32(constant_from)));
-        } else {
-          __ Move(ToRegister(to), ToHandle(constant_from));
-        }
-      } else {
-        if (IsInteger32Constant(constant_from)) {
-          __ movl(ToOperand(to), Immediate(ToInteger32(constant_from)));
-        } else {
-          __ Move(ToOperand(to), ToHandle(constant_from));
-        }
-      }
-    } else if (from == &marker_operand) {
-      if (to->IsRegister()) {
-        __ movq(ToRegister(to), cpu_scratch);
-      } else if (to->IsStackSlot()) {
-        __ movq(ToOperand(to), cpu_scratch);
-      } else if (to->IsDoubleRegister()) {
-        __ movsd(ToDoubleRegister(to), xmm_scratch);
-      } else {
-        ASSERT(to->IsDoubleStackSlot());
-        __ movsd(ToOperand(to), xmm_scratch);
-      }
-    } else if (to == &marker_operand) {
-      if (from->IsRegister()) {
-        __ movq(cpu_scratch, ToRegister(from));
-      } else if (from->IsStackSlot()) {
-        __ movq(cpu_scratch, ToOperand(from));
-      } else if (from->IsDoubleRegister()) {
-        __ movsd(xmm_scratch, ToDoubleRegister(from));
-      } else {
-        ASSERT(from->IsDoubleStackSlot());
-        __ movsd(xmm_scratch, ToOperand(from));
-      }
-    } else if (from->IsRegister()) {
-      if (to->IsRegister()) {
-        __ movq(ToRegister(to), ToRegister(from));
-      } else {
-        __ movq(ToOperand(to), ToRegister(from));
-      }
-    } else if (to->IsRegister()) {
-      __ movq(ToRegister(to), ToOperand(from));
-    } else if (from->IsStackSlot()) {
-      ASSERT(to->IsStackSlot());
-      __ push(rax);
-      __ movq(rax, ToOperand(from));
-      __ movq(ToOperand(to), rax);
-      __ pop(rax);
-    } else if (from->IsDoubleRegister()) {
-      ASSERT(to->IsDoubleStackSlot());
-      __ movsd(ToOperand(to), ToDoubleRegister(from));
-    } else if (to->IsDoubleRegister()) {
-      ASSERT(from->IsDoubleStackSlot());
-      __ movsd(ToDoubleRegister(to), ToOperand(from));
-    } else {
-      ASSERT(to->IsDoubleStackSlot() && from->IsDoubleStackSlot());
-      __ movsd(xmm_scratch, ToOperand(from));
-      __ movsd(ToOperand(to), xmm_scratch);
-    }
-  }
+  resolver_.Resolve(move);
 }


 void LCodeGen::DoGap(LGap* gap) {
   for (int i = LGap::FIRST_INNER_POSITION;
        i <= LGap::LAST_INNER_POSITION;
        i++) {
     LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i);
     LParallelMove* move = gap->GetParallelMove(inner_pos);
     if (move != NULL) DoParallelMove(move);
(...skipping 1157 matching lines...)

 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
   Abort("Unimplemented: %s", "DoOsrEntry");
 }

 #undef __

 } }  // namespace v8::internal

 #endif  // V8_TARGET_ARCH_X64
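
After this change, DoParallelMove simply delegates to resolver_, which emits code for each move directly instead of returning a reordered ZoneList for the code generator to walk. The new x64 resolver itself is not part of this file's diff; the sketch below is its assumed shape, inferred from the ia32 lithium gap resolver that this issue says is being ported, so every name and signature here should be treated as an assumption rather than a quotation of the actual patch:

// Assumed interface, inferred from the ia32 port referenced by this issue;
// not confirmed by this diff.
class LGapResolver BASE_EMBEDDED {
 public:
  explicit LGapResolver(LCodeGen* owner);

  // Resolve a parallel move, emitting a move-by-move instruction
  // sequence directly through the owning code generator.
  void Resolve(LParallelMove* parallel_move);

 private:
  void PerformMove(int index);  // perform dependent moves first, recursively
  void EmitMove(int index);     // emit one move between two operands
  void EmitSwap(int index);     // break a cycle with a swap via a scratch
};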
