| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 40 matching lines...) | |
| 51 codegen_->RecordSafepoint(pointers_, deoptimization_index_); | 51 codegen_->RecordSafepoint(pointers_, deoptimization_index_); |
| 52 } | 52 } |
| 53 | 53 |
| 54 private: | 54 private: |
| 55 LCodeGen* codegen_; | 55 LCodeGen* codegen_; |
| 56 LPointerMap* pointers_; | 56 LPointerMap* pointers_; |
| 57 int deoptimization_index_; | 57 int deoptimization_index_; |
| 58 }; | 58 }; |
| 59 | 59 |
| 60 | 60 |
| 61 class LGapNode: public ZoneObject { | |
| 62 public: | |
| 63 explicit LGapNode(LOperand* operand) | |
| 64 : operand_(operand), resolved_(false), visited_id_(-1) { } | |
| 65 | |
| 66 LOperand* operand() const { return operand_; } | |
| 67 bool IsResolved() const { return !IsAssigned() || resolved_; } | |
| 68 void MarkResolved() { | |
| 69 ASSERT(!IsResolved()); | |
| 70 resolved_ = true; | |
| 71 } | |
| 72 int visited_id() const { return visited_id_; } | |
| 73 void set_visited_id(int id) { | |
| 74 ASSERT(id > visited_id_); | |
| 75 visited_id_ = id; | |
| 76 } | |
| 77 | |
| 78 bool IsAssigned() const { return assigned_from_.is_set(); } | |
| 79 LGapNode* assigned_from() const { return assigned_from_.get(); } | |
| 80 void set_assigned_from(LGapNode* n) { assigned_from_.set(n); } | |
| 81 | |
| 82 private: | |
| 83 LOperand* operand_; | |
| 84 SetOncePointer<LGapNode> assigned_from_; | |
| 85 bool resolved_; | |
| 86 int visited_id_; | |
| 87 }; | |
| 88 | |
| 89 | |
| 90 LGapResolver::LGapResolver() | |
| 91 : nodes_(32), | |
| 92 identified_cycles_(4), | |
| 93 result_(16), | |
| 94 next_visited_id_(0) { | |
| 95 } | |
| 96 | |
| 97 | |
| 98 const ZoneList<LMoveOperands>* LGapResolver::Resolve( | |
| 99 const ZoneList<LMoveOperands>* moves, | |
| 100 LOperand* marker_operand) { | |
| 101 nodes_.Rewind(0); | |
| 102 identified_cycles_.Rewind(0); | |
| 103 result_.Rewind(0); | |
| 104 next_visited_id_ = 0; | |
| 105 | |
| 106 for (int i = 0; i < moves->length(); ++i) { | |
| 107 LMoveOperands move = moves->at(i); | |
| 108 if (!move.IsRedundant()) RegisterMove(move); | |
| 109 } | |
| 110 | |
| 111 for (int i = 0; i < identified_cycles_.length(); ++i) { | |
| 112 ResolveCycle(identified_cycles_[i], marker_operand); | |
| 113 } | |
| 114 | |
| 115 int unresolved_nodes; | |
| 116 do { | |
| 117 unresolved_nodes = 0; | |
| 118 for (int j = 0; j < nodes_.length(); j++) { | |
| 119 LGapNode* node = nodes_[j]; | |
| 120 if (!node->IsResolved() && node->assigned_from()->IsResolved()) { | |
| 121 AddResultMove(node->assigned_from(), node); | |
| 122 node->MarkResolved(); | |
| 123 } | |
| 124 if (!node->IsResolved()) ++unresolved_nodes; | |
| 125 } | |
| 126 } while (unresolved_nodes > 0); | |
| 127 return &result_; | |
| 128 } | |
| 129 | |
| 130 | |
| 131 void LGapResolver::AddResultMove(LGapNode* from, LGapNode* to) { | |
| 132 AddResultMove(from->operand(), to->operand()); | |
| 133 } | |
| 134 | |
| 135 | |
| 136 void LGapResolver::AddResultMove(LOperand* from, LOperand* to) { | |
| 137 result_.Add(LMoveOperands(from, to)); | |
| 138 } | |
| 139 | |
| 140 | |
| 141 void LGapResolver::ResolveCycle(LGapNode* start, LOperand* marker_operand) { | |
| 142 ZoneList<LOperand*> cycle_operands(8); | |
| 143 cycle_operands.Add(marker_operand); | |
| 144 LGapNode* cur = start; | |
| 145 do { | |
| 146 cur->MarkResolved(); | |
| 147 cycle_operands.Add(cur->operand()); | |
| 148 cur = cur->assigned_from(); | |
| 149 } while (cur != start); | |
| 150 cycle_operands.Add(marker_operand); | |
| 151 | |
| 152 for (int i = cycle_operands.length() - 1; i > 0; --i) { | |
| 153 LOperand* from = cycle_operands[i]; | |
| 154 LOperand* to = cycle_operands[i - 1]; | |
| 155 AddResultMove(from, to); | |
| 156 } | |
| 157 } | |
| 158 | |
| 159 | |
| 160 bool LGapResolver::CanReach(LGapNode* a, LGapNode* b, int visited_id) { | |
| 161 ASSERT(a != b); | |
| 162 LGapNode* cur = a; | |
| 163 while (cur != b && cur->visited_id() != visited_id && cur->IsAssigned()) { | |
| 164 cur->set_visited_id(visited_id); | |
| 165 cur = cur->assigned_from(); | |
| 166 } | |
| 167 | |
| 168 return cur == b; | |
| 169 } | |
| 170 | |
| 171 | |
| 172 bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) { | |
| 173 ASSERT(a != b); | |
| 174 return CanReach(a, b, next_visited_id_++); | |
| 175 } | |
| 176 | |
| 177 | |
| 178 void LGapResolver::RegisterMove(LMoveOperands move) { | |
| 179 if (move.from()->IsConstantOperand()) { | |
| 180 // Constant moves should be last in the machine code. Therefore add them | |
| 181 // first to the result set. | |
| 182 AddResultMove(move.from(), move.to()); | |
| 183 } else { | |
| 184 LGapNode* from = LookupNode(move.from()); | |
| 185 LGapNode* to = LookupNode(move.to()); | |
| 186 if (to->IsAssigned() && to->assigned_from() == from) { | |
| 187 move.Eliminate(); | |
| 188 return; | |
| 189 } | |
| 190 ASSERT(!to->IsAssigned()); | |
| 191 if (CanReach(from, to)) { | |
| 192 // This introduces a cycle. Save. | |
| 193 identified_cycles_.Add(from); | |
| 194 } | |
| 195 to->set_assigned_from(from); | |
| 196 } | |
| 197 } | |
| 198 | |
| 199 | |
| 200 LGapNode* LGapResolver::LookupNode(LOperand* operand) { | |
| 201 for (int i = 0; i < nodes_.length(); ++i) { | |
| 202 if (nodes_[i]->operand()->Equals(operand)) return nodes_[i]; | |
| 203 } | |
| 204 | |
| 205 // No node found => create a new one. | |
| 206 LGapNode* result = new LGapNode(operand); | |
| 207 nodes_.Add(result); | |
| 208 return result; | |
| 209 } | |
| 210 | |
| 211 | |
| 212 #define __ masm()-> | 61 #define __ masm()-> |
| 213 | 62 |
| 214 bool LCodeGen::GenerateCode() { | 63 bool LCodeGen::GenerateCode() { |
| 215 HPhase phase("Code generation", chunk()); | 64 HPhase phase("Code generation", chunk()); |
| 216 ASSERT(is_unused()); | 65 ASSERT(is_unused()); |
| 217 status_ = GENERATING; | 66 status_ = GENERATING; |
| 218 CpuFeatures::Scope scope(SSE2); | 67 CpuFeatures::Scope scope(SSE2); |
| 219 return GeneratePrologue() && | 68 return GeneratePrologue() && |
| 220 GenerateBody() && | 69 GenerateBody() && |
| 221 GenerateDeferredCode() && | 70 GenerateDeferredCode() && |
| (...skipping 198 matching lines...) | |
| 420 // Local or spill slot. Skip the frame pointer, function, and | 269 // Local or spill slot. Skip the frame pointer, function, and |
| 421 // context in the fixed part of the frame. | 270 // context in the fixed part of the frame. |
| 422 return Operand(ebp, -(index + 3) * kPointerSize); | 271 return Operand(ebp, -(index + 3) * kPointerSize); |
| 423 } else { | 272 } else { |
| 424 // Incoming parameter. Skip the return address. | 273 // Incoming parameter. Skip the return address. |
| 425 return Operand(ebp, -(index - 1) * kPointerSize); | 274 return Operand(ebp, -(index - 1) * kPointerSize); |
| 426 } | 275 } |
| 427 } | 276 } |
| 428 | 277 |
| 429 | 278 |
| 279 Operand LCodeGen::HighOperand(LOperand* op) { |
| 280 ASSERT(op->IsDoubleStackSlot()); |
| 281 int index = op->index(); |
| 282 int offset = (index >= 0) ? index + 3 : index - 1; |
| 283 return Operand(ebp, -offset * kPointerSize); |
| 284 } |
| 285 |
| 286 |
| 430 void LCodeGen::WriteTranslation(LEnvironment* environment, | 287 void LCodeGen::WriteTranslation(LEnvironment* environment, |
| 431 Translation* translation) { | 288 Translation* translation) { |
| 432 if (environment == NULL) return; | 289 if (environment == NULL) return; |
| 433 | 290 |
| 434 // The translation includes one command per value in the environment. | 291 // The translation includes one command per value in the environment. |
| 435 int translation_size = environment->values()->length(); | 292 int translation_size = environment->values()->length(); |
| 436 // The output frame height does not include the parameters. | 293 // The output frame height does not include the parameters. |
| 437 int height = translation_size - environment->parameter_count(); | 294 int height = translation_size - environment->parameter_count(); |
| 438 | 295 |
| 439 WriteTranslation(environment->outer(), translation); | 296 WriteTranslation(environment->outer(), translation); |
| (...skipping 315 matching lines...) | |
| 755 } else { | 612 } else { |
| 756 Comment(";;; B%d", label->block_id()); | 613 Comment(";;; B%d", label->block_id()); |
| 757 } | 614 } |
| 758 __ bind(label->label()); | 615 __ bind(label->label()); |
| 759 current_block_ = label->block_id(); | 616 current_block_ = label->block_id(); |
| 760 LCodeGen::DoGap(label); | 617 LCodeGen::DoGap(label); |
| 761 } | 618 } |
| 762 | 619 |
| 763 | 620 |
| 764 void LCodeGen::DoParallelMove(LParallelMove* move) { | 621 void LCodeGen::DoParallelMove(LParallelMove* move) { |
| 765 // xmm0 must always be a scratch register. | 622 resolver_.Resolve(move); |
| 766 XMMRegister xmm_scratch = xmm0; | |
| 767 LUnallocated marker_operand(LUnallocated::NONE); | |
| 768 | |
| 769 Register cpu_scratch = esi; | |
| 770 bool destroys_cpu_scratch = false; | |
| 771 | |
| 772 const ZoneList<LMoveOperands>* moves = | |
| 773 resolver_.Resolve(move->move_operands(), &marker_operand); | |
| 774 for (int i = moves->length() - 1; i >= 0; --i) { | |
| 775 LMoveOperands move = moves->at(i); | |
| 776 LOperand* from = move.from(); | |
| 777 LOperand* to = move.to(); | |
| 778 ASSERT(!from->IsDoubleRegister() || | |
| 779 !ToDoubleRegister(from).is(xmm_scratch)); | |
| 780 ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch)); | |
| 781 ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch)); | |
| 782 ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch)); | |
| 783 if (from->IsConstantOperand()) { | |
| 784 __ mov(ToOperand(to), ToImmediate(from)); | |
| 785 } else if (from == &marker_operand) { | |
| 786 if (to->IsRegister() || to->IsStackSlot()) { | |
| 787 __ mov(ToOperand(to), cpu_scratch); | |
| 788 ASSERT(destroys_cpu_scratch); | |
| 789 } else { | |
| 790 ASSERT(to->IsDoubleRegister() || to->IsDoubleStackSlot()); | |
| 791 __ movdbl(ToOperand(to), xmm_scratch); | |
| 792 } | |
| 793 } else if (to == &marker_operand) { | |
| 794 if (from->IsRegister() || from->IsStackSlot()) { | |
| 795 __ mov(cpu_scratch, ToOperand(from)); | |
| 796 destroys_cpu_scratch = true; | |
| 797 } else { | |
| 798 ASSERT(from->IsDoubleRegister() || from->IsDoubleStackSlot()); | |
| 799 __ movdbl(xmm_scratch, ToOperand(from)); | |
| 800 } | |
| 801 } else if (from->IsRegister()) { | |
| 802 __ mov(ToOperand(to), ToRegister(from)); | |
| 803 } else if (to->IsRegister()) { | |
| 804 __ mov(ToRegister(to), ToOperand(from)); | |
| 805 } else if (from->IsStackSlot()) { | |
| 806 ASSERT(to->IsStackSlot()); | |
| 807 __ push(eax); | |
| 808 __ mov(eax, ToOperand(from)); | |
| 809 __ mov(ToOperand(to), eax); | |
| 810 __ pop(eax); | |
| 811 } else if (from->IsDoubleRegister()) { | |
| 812 __ movdbl(ToOperand(to), ToDoubleRegister(from)); | |
| 813 } else if (to->IsDoubleRegister()) { | |
| 814 __ movdbl(ToDoubleRegister(to), ToOperand(from)); | |
| 815 } else { | |
| 816 ASSERT(to->IsDoubleStackSlot() && from->IsDoubleStackSlot()); | |
| 817 __ movdbl(xmm_scratch, ToOperand(from)); | |
| 818 __ movdbl(ToOperand(to), xmm_scratch); | |
| 819 } | |
| 820 } | |
| 821 | |
| 822 if (destroys_cpu_scratch) { | |
| 823 __ mov(cpu_scratch, Operand(ebp, -kPointerSize)); | |
| 824 } | |
| 825 } | 623 } |
| 826 | 624 |
| 827 | 625 |
| 828 void LCodeGen::DoGap(LGap* gap) { | 626 void LCodeGen::DoGap(LGap* gap) { |
| 829 for (int i = LGap::FIRST_INNER_POSITION; | 627 for (int i = LGap::FIRST_INNER_POSITION; |
| 830 i <= LGap::LAST_INNER_POSITION; | 628 i <= LGap::LAST_INNER_POSITION; |
| 831 i++) { | 629 i++) { |
| 832 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); | 630 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); |
| 833 LParallelMove* move = gap->GetParallelMove(inner_pos); | 631 LParallelMove* move = gap->GetParallelMove(inner_pos); |
| 834 if (move != NULL) DoParallelMove(move); | 632 if (move != NULL) DoParallelMove(move); |
| (...skipping 2808 matching lines...) | |
| 3643 ASSERT(osr_pc_offset_ == -1); | 3441 ASSERT(osr_pc_offset_ == -1); |
| 3644 osr_pc_offset_ = masm()->pc_offset(); | 3442 osr_pc_offset_ = masm()->pc_offset(); |
| 3645 } | 3443 } |
| 3646 | 3444 |
| 3647 | 3445 |
| 3648 #undef __ | 3446 #undef __ |
| 3649 | 3447 |
| 3650 } } // namespace v8::internal | 3448 } } // namespace v8::internal |
| 3651 | 3449 |
| 3652 #endif // V8_TARGET_ARCH_IA32 | 3450 #endif // V8_TARGET_ARCH_IA32 |
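
Note on the change above: the old DoParallelMove resolved gap moves inline, using a marker operand plus esi and xmm0 as scratch locations, while the new code delegates the whole job to resolver_.Resolve(move); the resolver's implementation is outside this diff. As background, a gap (parallel-move) resolver has to emit the moves of a parallel move so that no source is overwritten before it has been read, and has to break cyclic dependencies (such as a register swap) through a scratch location. The standalone C++ sketch below illustrates only that general idea; the Move struct, the ResolveParallelMove function, and the int-vector "register file" with a reserved scratch index are invented for this example and are not part of the V8 sources.

    // Standalone sketch of parallel-move ("gap") resolution.  Locations are
    // modeled as indices into a plain int vector; one index is reserved as a
    // scratch slot for breaking cycles.  Illustration only, not the V8
    // LGapResolver.
    #include <cstdio>
    #include <vector>

    struct Move {
      int from;
      int to;
    };

    void ResolveParallelMove(std::vector<Move> moves,
                             std::vector<int>* regs,
                             int scratch) {
      std::vector<bool> done(moves.size(), false);
      size_t remaining = moves.size();
      while (remaining > 0) {
        bool progress = false;
        for (size_t i = 0; i < moves.size(); ++i) {
          if (done[i]) continue;
          // A move is safe to perform when its destination is not the source
          // of any other still-pending move.
          bool blocked = false;
          for (size_t j = 0; j < moves.size(); ++j) {
            if (j != i && !done[j] && moves[j].from == moves[i].to) {
              blocked = true;
              break;
            }
          }
          if (!blocked) {
            (*regs)[moves[i].to] = (*regs)[moves[i].from];
            done[i] = true;
            --remaining;
            progress = true;
          }
        }
        if (!progress) {
          // Every pending move is blocked, so the pending moves form cycles.
          // Break one cycle: save the destination of an arbitrary pending
          // move in the scratch slot and redirect the moves that still read
          // that location.  (Assumes no input move targets the scratch slot.)
          for (size_t i = 0; i < moves.size(); ++i) {
            if (done[i]) continue;
            (*regs)[scratch] = (*regs)[moves[i].to];
            for (size_t j = 0; j < moves.size(); ++j) {
              if (!done[j] && moves[j].from == moves[i].to) {
                moves[j].from = scratch;
              }
            }
            break;
          }
        }
      }
    }

    int main() {
      // Six pseudo-locations; index 5 serves as the scratch slot.
      std::vector<int> regs = {10, 20, 30, 0, 0, 0};
      // One parallel move: swap locations 0 and 1, and copy 2 into 3.
      std::vector<Move> moves = {{0, 1}, {1, 0}, {2, 3}};
      ResolveParallelMove(moves, &regs, 5);
      std::printf("%d %d %d %d\n", regs[0], regs[1], regs[2], regs[3]);
      return 0;
    }

Running the sketch prints 20 10 30 30: locations 0 and 1 are swapped by routing one value through the scratch slot, while location 2 is copied straight into 3, which is the same ordering problem the platform-specific resolver solves for registers, stack slots, and double slots.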