OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 152 matching lines...)
163 return ToRegister(op->index()); | 163 return ToRegister(op->index()); |
164 } | 164 } |
165 | 165 |
166 | 166 |
167 XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const { | 167 XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const { |
168 ASSERT(op->IsDoubleRegister()); | 168 ASSERT(op->IsDoubleRegister()); |
169 return ToDoubleRegister(op->index()); | 169 return ToDoubleRegister(op->index()); |
170 } | 170 } |
171 | 171 |
172 | 172 |
| 173 bool LCodeGen::IsInteger32Constant(LConstantOperand* op) const { |
| 174 return op->IsConstantOperand() && |
| 175 chunk_->LookupLiteralRepresentation(op).IsInteger32(); |
| 176 } |
| 177 |
| 178 |
| 179 bool LCodeGen::IsTaggedConstant(LConstantOperand* op) const { |
| 180 return op->IsConstantOperand() && |
| 181 chunk_->LookupLiteralRepresentation(op).IsTagged(); |
| 182 } |
| 183 |
| 184 |
173 int LCodeGen::ToInteger32(LConstantOperand* op) const { | 185 int LCodeGen::ToInteger32(LConstantOperand* op) const { |
174 Handle<Object> value = chunk_->LookupLiteral(op); | 186 Handle<Object> value = chunk_->LookupLiteral(op); |
175 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32()); | 187 ASSERT(chunk_->LookupLiteralRepresentation(op).IsInteger32()); |
176 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) == | 188 ASSERT(static_cast<double>(static_cast<int32_t>(value->Number())) == |
177 value->Number()); | 189 value->Number()); |
178 return static_cast<int32_t>(value->Number()); | 190 return static_cast<int32_t>(value->Number()); |
179 } | 191 } |
180 | 192 |
181 | 193 |
| 194 Handle<Object> LCodeGen::ToHandle(LConstantOperand* op) const { |
| 195 Handle<Object> literal = chunk_->LookupLiteral(op); |
| 196 Representation r = chunk_->LookupLiteralRepresentation(op); |
| 197 ASSERT(r.IsTagged()); |
| 198 return literal; |
| 199 } |
| 200 |
| 201 |
182 Operand LCodeGen::ToOperand(LOperand* op) const { | 202 Operand LCodeGen::ToOperand(LOperand* op) const { |
183 // Does not handle registers. In the X64 assembler, plain registers are | 203 // Does not handle registers. In the X64 assembler, plain registers are |
184 // not representable as an Operand. | 204 // not representable as an Operand. |
185 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); | 205 ASSERT(op->IsStackSlot() || op->IsDoubleStackSlot()); |
186 int index = op->index(); | 206 int index = op->index(); |
187 if (index >= 0) { | 207 if (index >= 0) { |
188 // Local or spill slot. Skip the frame pointer, function, and | 208 // Local or spill slot. Skip the frame pointer, function, and |
189 // context in the fixed part of the frame. | 209 // context in the fixed part of the frame. |
190 return Operand(rbp, -(index + 3) * kPointerSize); | 210 return Operand(rbp, -(index + 3) * kPointerSize); |
191 } else { | 211 } else { |
(...skipping 227 matching lines...)
419 } else { | 439 } else { |
420 Comment(";;; B%d", label->block_id()); | 440 Comment(";;; B%d", label->block_id()); |
421 } | 441 } |
422 __ bind(label->label()); | 442 __ bind(label->label()); |
423 current_block_ = label->block_id(); | 443 current_block_ = label->block_id(); |
424 LCodeGen::DoGap(label); | 444 LCodeGen::DoGap(label); |
425 } | 445 } |
426 | 446 |
427 | 447 |
428 void LCodeGen::DoParallelMove(LParallelMove* move) { | 448 void LCodeGen::DoParallelMove(LParallelMove* move) { |
429 Abort("Unimplemented: %s", "DoParallelMove"); | 449 // xmm0 must always be a scratch register. |
| 450 XMMRegister xmm_scratch = xmm0; |
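 | // The resolver may rewrite a move to pass through this marker operand; |
 | // such moves are emitted below as stores to and loads from the reserved |
 | // scratch locations (cpu_scratch for values, xmm_scratch for doubles). |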
| 451 LUnallocated marker_operand(LUnallocated::NONE); |
| 452 |
| 453 Register cpu_scratch = kScratchRegister; |
| 454 |
| 455 const ZoneList<LMoveOperands>* moves = |
| 456 resolver_.Resolve(move->move_operands(), &marker_operand); |
| 457 for (int i = moves->length() - 1; i >= 0; --i) { |
 | 458 LMoveOperands operands = moves->at(i); |
 | 459 LOperand* from = operands.from(); |
 | 460 LOperand* to = operands.to(); |
| 461 ASSERT(!from->IsDoubleRegister() || |
| 462 !ToDoubleRegister(from).is(xmm_scratch)); |
| 463 ASSERT(!to->IsDoubleRegister() || !ToDoubleRegister(to).is(xmm_scratch)); |
| 464 ASSERT(!from->IsRegister() || !ToRegister(from).is(cpu_scratch)); |
| 465 ASSERT(!to->IsRegister() || !ToRegister(to).is(cpu_scratch)); |
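 | // Constants are materialized directly into the destination: int32 |
 | // constants as immediates, tagged values through a handle. |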
| 466 if (from->IsConstantOperand()) { |
| 467 LConstantOperand* constant_from = LConstantOperand::cast(from); |
| 468 if (to->IsRegister()) { |
| 469 if (IsInteger32Constant(constant_from)) { |
| 470 __ movl(ToRegister(to), Immediate(ToInteger32(constant_from))); |
| 471 } else { |
| 472 __ Move(ToRegister(to), ToHandle(constant_from)); |
| 473 } |
| 474 } else { |
| 475 if (IsInteger32Constant(constant_from)) { |
| 476 __ movl(ToOperand(to), Immediate(ToInteger32(constant_from))); |
| 477 } else { |
| 478 __ Move(ToOperand(to), ToHandle(constant_from)); |
| 479 } |
| 480 } |
| 481 } else if (from == &marker_operand) { |
| 482 if (to->IsRegister()) { |
| 483 __ movq(ToRegister(to), cpu_scratch); |
| 484 } else if (to->IsStackSlot()) { |
| 485 __ movq(ToOperand(to), cpu_scratch); |
| 486 } else if (to->IsDoubleRegister()) { |
| 487 __ movsd(ToDoubleRegister(to), xmm_scratch); |
| 488 } else { |
| 489 ASSERT(to->IsDoubleStackSlot()); |
| 490 __ movsd(ToOperand(to), xmm_scratch); |
| 491 } |
| 492 } else if (to == &marker_operand) { |
| 493 if (from->IsRegister()) { |
| 494 __ movq(cpu_scratch, ToRegister(from)); |
| 495 } else if (from->IsStackSlot()) { |
| 496 __ movq(cpu_scratch, ToOperand(from)); |
| 497 } else if (from->IsDoubleRegister()) { |
| 498 __ movsd(xmm_scratch, ToDoubleRegister(from)); |
| 499 } else { |
| 500 ASSERT(from->IsDoubleStackSlot()); |
| 501 __ movsd(xmm_scratch, ToOperand(from)); |
| 502 } |
| 503 } else if (from->IsRegister()) { |
| 504 if (to->IsRegister()) { |
| 505 __ movq(ToRegister(to), ToRegister(from)); |
| 506 } else { |
| 507 __ movq(ToOperand(to), ToRegister(from)); |
| 508 } |
| 509 } else if (to->IsRegister()) { |
| 510 __ movq(ToRegister(to), ToOperand(from)); |
| 511 } else if (from->IsStackSlot()) { |
| 512 ASSERT(to->IsStackSlot()); |
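 | // kScratchRegister is reserved for moves through the marker operand, |
 | // so preserve rax and use it for the memory-to-memory move. |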
| 513 __ push(rax); |
| 514 __ movq(rax, ToOperand(from)); |
| 515 __ movq(ToOperand(to), rax); |
| 516 __ pop(rax); |
| 517 } else if (from->IsDoubleRegister()) { |
| 518 ASSERT(to->IsDoubleStackSlot()); |
| 519 __ movsd(ToOperand(to), ToDoubleRegister(from)); |
| 520 } else if (to->IsDoubleRegister()) { |
| 521 ASSERT(from->IsDoubleStackSlot()); |
| 522 __ movsd(ToDoubleRegister(to), ToOperand(from)); |
| 523 } else { |
| 524 ASSERT(to->IsDoubleStackSlot() && from->IsDoubleStackSlot()); |
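 | // Both operands are in memory; go through xmm_scratch. |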
| 525 __ movsd(xmm_scratch, ToOperand(from)); |
| 526 __ movsd(ToOperand(to), xmm_scratch); |
| 527 } |
| 528 } |
430 } | 529 } |
431 | 530 |
432 | 531 |
433 void LCodeGen::DoGap(LGap* gap) { | 532 void LCodeGen::DoGap(LGap* gap) { |
434 for (int i = LGap::FIRST_INNER_POSITION; | 533 for (int i = LGap::FIRST_INNER_POSITION; |
435 i <= LGap::LAST_INNER_POSITION; | 534 i <= LGap::LAST_INNER_POSITION; |
436 i++) { | 535 i++) { |
437 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); | 536 LGap::InnerPosition inner_pos = static_cast<LGap::InnerPosition>(i); |
438 LParallelMove* move = gap->GetParallelMove(inner_pos); | 537 LParallelMove* move = gap->GetParallelMove(inner_pos); |
439 if (move != NULL) DoParallelMove(move); | 538 if (move != NULL) DoParallelMove(move); |
(...skipping 719 matching lines...)
1159 | 1258 |
1160 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { | 1259 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { |
1161 Abort("Unimplemented: %s", "DoOsrEntry"); | 1260 Abort("Unimplemented: %s", "DoOsrEntry"); |
1162 } | 1261 } |
1163 | 1262 |
1164 #undef __ | 1263 #undef __ |
1165 | 1264 |
1166 } } // namespace v8::internal | 1265 } } // namespace v8::internal |
1167 | 1266 |
1168 #endif // V8_TARGET_ARCH_X64 | 1267 #endif // V8_TARGET_ARCH_X64 |