| OLD | NEW | 
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. | 
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without | 
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are | 
| 4 // met: | 4 // met: | 
| 5 // | 5 // | 
| 6 //     * Redistributions of source code must retain the above copyright | 6 //     * Redistributions of source code must retain the above copyright | 
| 7 //       notice, this list of conditions and the following disclaimer. | 7 //       notice, this list of conditions and the following disclaimer. | 
| 8 //     * Redistributions in binary form must reproduce the above | 8 //     * Redistributions in binary form must reproduce the above | 
| 9 //       copyright notice, this list of conditions and the following | 9 //       copyright notice, this list of conditions and the following | 
| 10 //       disclaimer in the documentation and/or other materials provided | 10 //       disclaimer in the documentation and/or other materials provided | 
| (...skipping 137 matching lines...) | (...skipping 137 matching lines...) | 
| 148 } | 148 } | 
| 149 | 149 | 
| 150 | 150 | 
| 151 bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) { | 151 bool LGapResolver::CanReach(LGapNode* a, LGapNode* b) { | 
| 152   ASSERT(a != b); | 152   ASSERT(a != b); | 
| 153   return CanReach(a, b, next_visited_id_++); | 153   return CanReach(a, b, next_visited_id_++); | 
| 154 } | 154 } | 
| 155 | 155 | 
| 156 | 156 | 
| 157 void LGapResolver::RegisterMove(LMoveOperands move) { | 157 void LGapResolver::RegisterMove(LMoveOperands move) { | 
| 158   if (move.from()->IsConstantOperand()) { | 158   if (move.source()->IsConstantOperand()) { | 
| 159     // Constant moves should be last in the machine code. Therefore add them | 159     // Constant moves should be last in the machine code. Therefore add them | 
| 160     // first to the result set. | 160     // first to the result set. | 
| 161     AddResultMove(move.from(), move.to()); | 161     AddResultMove(move.source(), move.destination()); | 
| 162   } else { | 162   } else { | 
| 163     LGapNode* from = LookupNode(move.from()); | 163     LGapNode* from = LookupNode(move.source()); | 
| 164     LGapNode* to = LookupNode(move.to()); | 164     LGapNode* to = LookupNode(move.destination()); | 
| 165     if (to->IsAssigned() && to->assigned_from() == from) { | 165     if (to->IsAssigned() && to->assigned_from() == from) { | 
| 166       move.Eliminate(); | 166       move.Eliminate(); | 
| 167       return; | 167       return; | 
| 168     } | 168     } | 
| 169     ASSERT(!to->IsAssigned()); | 169     ASSERT(!to->IsAssigned()); | 
| 170     if (CanReach(from, to)) { | 170     if (CanReach(from, to)) { | 
| 171       // This introduces a cycle. Save. | 171       // This introduces a cycle. Save. | 
| 172       identified_cycles_.Add(from); | 172       identified_cycles_.Add(from); | 
| 173     } | 173     } | 
| 174     to->set_assigned_from(from); | 174     to->set_assigned_from(from); | 
| (...skipping 469 matching lines...) | (...skipping 469 matching lines...) | 
| 644   // xmm0 must always be a scratch register. | 644   // xmm0 must always be a scratch register. | 
| 645   XMMRegister xmm_scratch = xmm0; | 645   XMMRegister xmm_scratch = xmm0; | 
| 646   LUnallocated marker_operand(LUnallocated::NONE); | 646   LUnallocated marker_operand(LUnallocated::NONE); | 
| 647 | 647 | 
| 648   Register cpu_scratch = kScratchRegister; | 648   Register cpu_scratch = kScratchRegister; | 
| 649 | 649 | 
| 650   const ZoneList<LMoveOperands>* moves = | 650   const ZoneList<LMoveOperands>* moves = | 
| 651       resolver_.Resolve(move->move_operands(), &marker_operand); | 651       resolver_.Resolve(move->move_operands(), &marker_operand); | 
| 652   for (int i = moves->length() - 1; i >= 0; --i) { | 652   for (int i = moves->length() - 1; i >= 0; --i) { | 
| 653     LMoveOperands move = moves->at(i); | 653     LMoveOperands move = moves->at(i); | 
| 654     LOperand* from = move.from(); | 654     LOperand* from = move.source(); | 
| 655     LOperand* to = move.to(); | 655     LOperand* to = move.destination(); | 
| 656     ASSERT(!from->IsDoubleRegister() || | 656     ASSERT(!from->IsDoubleRegister() || | 
| 657            !ToDoubleRegister(from).is(xmm_scratch)); | 657            !ToDoubleRegister(from).is(xmm_scratch)); | 
| 658     ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch)); | 658     ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch)); | 
| 659     ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch)); | 659     ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch)); | 
| 660     ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch)); | 660     ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch)); | 
| 661     if (from->IsConstantOperand()) { | 661     if (from->IsConstantOperand()) { | 
| 662       LConstantOperand* constant_from = LConstantOperand::cast(from); | 662       LConstantOperand* constant_from = LConstantOperand::cast(from); | 
| 663       if (to->IsRegister()) { | 663       if (to->IsRegister()) { | 
| 664         if (IsInteger32Constant(constant_from)) { | 664         if (IsInteger32Constant(constant_from)) { | 
| 665           __ movl(ToRegister(to), Immediate(ToInteger32(constant_from))); | 665           __ movl(ToRegister(to), Immediate(ToInteger32(constant_from))); | 
| (...skipping 795 matching lines...) | (...skipping 795 matching lines...) | 
| 1461 | 1461 | 
| 1462 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 1462 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 
| 1463   Abort("Unimplemented: %s", "DoOsrEntry"); | 1463   Abort("Unimplemented: %s", "DoOsrEntry"); | 
| 1464 } | 1464 } | 
| 1465 | 1465 | 
| 1466 #undef __ | 1466 #undef __ | 
| 1467 | 1467 | 
| 1468 } }  // namespace v8::internal | 1468 } }  // namespace v8::internal | 
| 1469 | 1469 | 
| 1470 #endif  // V8_TARGET_ARCH_X64 | 1470 #endif  // V8_TARGET_ARCH_X64 | 
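
Reviewer note: the subtle part of this diff is the cycle handling in `RegisterMove` (new lines 157–174), which is otherwise only touched by the `from()`/`to()` → `source()`/`destination()` accessor rename. Below is a minimal, self-contained sketch of the idea, using hypothetical stand-ins (`Node`, `Resolver`, `register_move`, `can_reach`) rather than V8's actual `LGapNode`/`LGapResolver` types; the `visited_id` stamp is assumed to play the role of `next_visited_id_` in the diff.

```cpp
#include <cassert>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for LGapNode: each node is one operand in the
// parallel move; assigned_from links a destination back to its source.
struct Node {
  int id;
  Node* assigned_from = nullptr;  // set once a registered move targets this node
  int visited_id = -1;            // stamp bounding each reachability walk
};

class Resolver {
 public:
  // Counterpart of LGapResolver::RegisterMove for already-looked-up nodes.
  void register_move(Node* from, Node* to) {
    if (to->assigned_from == from) return;  // redundant move; cf. move.Eliminate()
    assert(to->assigned_from == nullptr);   // each destination is written once
    if (can_reach(from, to)) {
      // Adding the edge to <- from would close a loop in the move graph:
      // remember the cycle so it can later be broken with a scratch register.
      cycles_.push_back(from);
    }
    to->assigned_from = from;
  }

  const std::vector<Node*>& cycles() const { return cycles_; }

 private:
  // Counterpart of LGapResolver::CanReach: follow assigned_from links
  // (destination -> source) starting at a and report whether b is on that
  // chain. The per-query stamp stops the walk if it runs into a loop.
  bool can_reach(Node* a, Node* b) {
    int stamp = next_visited_id_++;
    for (Node* cur = a; cur != nullptr; cur = cur->assigned_from) {
      if (cur == b) return true;
      if (cur->visited_id == stamp) return false;  // chain already explored
      cur->visited_id = stamp;
    }
    return false;
  }

  int next_visited_id_ = 0;
  std::vector<Node*> cycles_;
};

int main() {
  Node r0{0}, r1{1}, r2{2}, r3{3};
  Resolver resolver;
  // {r1 <- r0, r0 <- r1} is a swap: registering the second move finds that
  // r1's value already originates at r0, so a cycle is recorded.
  resolver.register_move(&r0, &r1);
  resolver.register_move(&r1, &r0);
  // {r3 <- r2} is an ordinary move: no cycle.
  resolver.register_move(&r2, &r3);
  std::printf("cycles found: %zu\n", resolver.cycles().size());  // prints 1
  return 0;
}
```

Note also how the comment at new lines 159–160 pairs with the loop at line 652: `DoParallelMove` walks the resolved move list from back to front, so constant moves added first to the result set are emitted last in the machine code, exactly as the comment promises.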