OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
6 #include "src/compiler/code-generator-impl.h" | 6 #include "src/compiler/code-generator-impl.h" |
7 #include "src/compiler/gap-resolver.h" | 7 #include "src/compiler/gap-resolver.h" |
8 #include "src/compiler/node-matchers.h" | 8 #include "src/compiler/node-matchers.h" |
9 #include "src/compiler/node-properties-inl.h" | 9 #include "src/compiler/node-properties-inl.h" |
10 #include "src/mips/macro-assembler-mips.h" | 10 #include "src/mips/macro-assembler-mips.h" |
(...skipping 136 matching lines...)
147 __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); | 147 __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset)); |
148 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); | 148 __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg)); |
149 } | 149 } |
150 | 150 |
151 __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); | 151 __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset)); |
152 __ Call(at); | 152 __ Call(at); |
153 AddSafepointAndDeopt(instr); | 153 AddSafepointAndDeopt(instr); |
154 break; | 154 break; |
155 } | 155 } |
156 case kArchJmp: | 156 case kArchJmp: |
157 __ Branch(GetLabel(i.InputRpo(0))); | 157 AssembleArchJump(i.InputRpo(0)); |
158 break; | 158 break; |
159 case kArchNop: | 159 case kArchNop: |
160 // don't emit code for nops. | 160 // don't emit code for nops. |
161 break; | 161 break; |
162 case kArchRet: | 162 case kArchRet: |
163 AssembleReturn(); | 163 AssembleReturn(); |
164 break; | 164 break; |
165 case kArchStackPointer: | 165 case kArchStackPointer: |
166 __ mov(i.OutputRegister(), sp); | 166 __ mov(i.OutputRegister(), sp); |
167 break; | 167 break; |
(...skipping 219 matching lines...)
387 | 387 |
388 #define UNSUPPORTED_COND(opcode, condition) \ | 388 #define UNSUPPORTED_COND(opcode, condition) \ |
389 OFStream out(stdout); \ | 389 OFStream out(stdout); \ |
390 out << "Unsupported " << #opcode << " condition: \"" << condition << "\""; \ | 390 out << "Unsupported " << #opcode << " condition: \"" << condition << "\""; \ |
391 UNIMPLEMENTED(); | 391 UNIMPLEMENTED(); |
392 | 392 |
393 // Assembles branches after an instruction. | 393 // Assembles branches after an instruction. |
394 void CodeGenerator::AssembleArchBranch(Instruction* instr, | 394 void CodeGenerator::AssembleArchBranch(Instruction* instr, |
395 FlagsCondition condition) { | 395 BranchInfo* branch) { |
396 MipsOperandConverter i(this, instr); | 396 MipsOperandConverter i(this, instr); |
397 Label done; | 397 Label* tlabel = branch->true_label; |
398 | 398 Label* flabel = branch->false_label; |
399 // Emit a branch. The true and false targets are always the last two inputs | |
400 // to the instruction. | |
401 BasicBlock::RpoNumber tblock = | |
402 i.InputRpo(static_cast<int>(instr->InputCount()) - 2); | |
403 BasicBlock::RpoNumber fblock = | |
404 i.InputRpo(static_cast<int>(instr->InputCount()) - 1); | |
405 bool fallthru = IsNextInAssemblyOrder(fblock); | |
406 Label* tlabel = GetLabel(tblock); | |
407 Label* flabel = fallthru ? &done : GetLabel(fblock); | |
408 Condition cc = kNoCondition; | 399 Condition cc = kNoCondition; |
409 | 400 |
410 // MIPS does not have condition code flags, so compare and branch are | 401 // MIPS does not have condition code flags, so compare and branch are |
411 // implemented differently than on the other arch's. The compare operations | 402 // implemented differently than on the other arch's. The compare operations |
412 // emit mips psuedo-instructions, which are handled here by branch | 403 // emit mips pseudo-instructions, which are handled here by branch |
413 // instructions that do the actual comparison. Essential that the input | 404 // instructions that do the actual comparison. Essential that the input |
414 // registers to compare psuedo-op are not modified before this branch op, as | 405 // registers to compare pseudo-op are not modified before this branch op, as |
415 // they are tested here. | 406 // they are tested here. |
416 // TODO(plind): Add CHECK() to ensure that test/cmp and this branch were | 407 // TODO(plind): Add CHECK() to ensure that test/cmp and this branch were |
417 // not separated by other instructions. | 408 // not separated by other instructions. |
418 | 409 |
419 if (instr->arch_opcode() == kMipsTst) { | 410 if (instr->arch_opcode() == kMipsTst) { |
420 switch (condition) { | 411 switch (branch->condition) { |
421 case kNotEqual: | 412 case kNotEqual: |
422 cc = ne; | 413 cc = ne; |
423 break; | 414 break; |
424 case kEqual: | 415 case kEqual: |
425 cc = eq; | 416 cc = eq; |
426 break; | 417 break; |
427 default: | 418 default: |
428 UNSUPPORTED_COND(kMipsTst, condition); | 419 UNSUPPORTED_COND(kMipsTst, branch->condition); |
429 break; | 420 break; |
430 } | 421 } |
431 __ And(at, i.InputRegister(0), i.InputOperand(1)); | 422 __ And(at, i.InputRegister(0), i.InputOperand(1)); |
432 __ Branch(tlabel, cc, at, Operand(zero_reg)); | 423 __ Branch(tlabel, cc, at, Operand(zero_reg)); |
433 | 424 |
434 } else if (instr->arch_opcode() == kMipsAddOvf || | 425 } else if (instr->arch_opcode() == kMipsAddOvf || |
435 instr->arch_opcode() == kMipsSubOvf) { | 426 instr->arch_opcode() == kMipsSubOvf) { |
436 // kMipsAddOvf, SubOvf emit negative result to 'kCompareReg' on overflow. | 427 // kMipsAddOvf, SubOvf emit negative result to 'kCompareReg' on overflow. |
437 switch (condition) { | 428 switch (branch->condition) { |
438 case kOverflow: | 429 case kOverflow: |
439 cc = lt; | 430 cc = lt; |
440 break; | 431 break; |
441 case kNotOverflow: | 432 case kNotOverflow: |
442 cc = ge; | 433 cc = ge; |
443 break; | 434 break; |
444 default: | 435 default: |
445 UNSUPPORTED_COND(kMipsAddOvf, condition); | 436 UNSUPPORTED_COND(kMipsAddOvf, branch->condition); |
446 break; | 437 break; |
447 } | 438 } |
448 __ Branch(tlabel, cc, kCompareReg, Operand(zero_reg)); | 439 __ Branch(tlabel, cc, kCompareReg, Operand(zero_reg)); |
449 | 440 |
450 } else if (instr->arch_opcode() == kMipsCmp) { | 441 } else if (instr->arch_opcode() == kMipsCmp) { |
451 switch (condition) { | 442 switch (branch->condition) { |
452 case kEqual: | 443 case kEqual: |
453 cc = eq; | 444 cc = eq; |
454 break; | 445 break; |
455 case kNotEqual: | 446 case kNotEqual: |
456 cc = ne; | 447 cc = ne; |
457 break; | 448 break; |
458 case kSignedLessThan: | 449 case kSignedLessThan: |
459 cc = lt; | 450 cc = lt; |
460 break; | 451 break; |
461 case kSignedGreaterThanOrEqual: | 452 case kSignedGreaterThanOrEqual: |
(...skipping 11 matching lines...)
473 case kUnsignedGreaterThanOrEqual: | 464 case kUnsignedGreaterThanOrEqual: |
474 cc = hs; | 465 cc = hs; |
475 break; | 466 break; |
476 case kUnsignedLessThanOrEqual: | 467 case kUnsignedLessThanOrEqual: |
477 cc = ls; | 468 cc = ls; |
478 break; | 469 break; |
479 case kUnsignedGreaterThan: | 470 case kUnsignedGreaterThan: |
480 cc = hi; | 471 cc = hi; |
481 break; | 472 break; |
482 default: | 473 default: |
483 UNSUPPORTED_COND(kMipsCmp, condition); | 474 UNSUPPORTED_COND(kMipsCmp, branch->condition); |
484 break; | 475 break; |
485 } | 476 } |
486 __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1)); | 477 __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1)); |
487 | 478 |
488 if (!fallthru) __ Branch(flabel); // no fallthru to flabel. | 479 if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. |
489 __ bind(&done); | |
490 | 480 |
491 } else if (instr->arch_opcode() == kMipsCmpD) { | 481 } else if (instr->arch_opcode() == kMipsCmpD) { |
492 // TODO(dusmil) optimize unordered checks to use less instructions | 482 // TODO(dusmil) optimize unordered checks to use fewer instructions |
493 // even if we have to unfold BranchF macro. | 483 // even if we have to unfold BranchF macro. |
494 Label* nan = flabel; | 484 Label* nan = flabel; |
495 switch (condition) { | 485 switch (branch->condition) { |
496 case kUnorderedEqual: | 486 case kUnorderedEqual: |
497 cc = eq; | 487 cc = eq; |
498 break; | 488 break; |
499 case kUnorderedNotEqual: | 489 case kUnorderedNotEqual: |
500 cc = ne; | 490 cc = ne; |
501 nan = tlabel; | 491 nan = tlabel; |
502 break; | 492 break; |
503 case kUnorderedLessThan: | 493 case kUnorderedLessThan: |
504 cc = lt; | 494 cc = lt; |
505 break; | 495 break; |
506 case kUnorderedGreaterThanOrEqual: | 496 case kUnorderedGreaterThanOrEqual: |
507 cc = ge; | 497 cc = ge; |
508 nan = tlabel; | 498 nan = tlabel; |
509 break; | 499 break; |
510 case kUnorderedLessThanOrEqual: | 500 case kUnorderedLessThanOrEqual: |
511 cc = le; | 501 cc = le; |
512 break; | 502 break; |
513 case kUnorderedGreaterThan: | 503 case kUnorderedGreaterThan: |
514 cc = gt; | 504 cc = gt; |
515 nan = tlabel; | 505 nan = tlabel; |
516 break; | 506 break; |
517 default: | 507 default: |
518 UNSUPPORTED_COND(kMipsCmpD, condition); | 508 UNSUPPORTED_COND(kMipsCmpD, branch->condition); |
519 break; | 509 break; |
520 } | 510 } |
521 __ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0), | 511 __ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0), |
522 i.InputDoubleRegister(1)); | 512 i.InputDoubleRegister(1)); |
523 | 513 |
524 if (!fallthru) __ Branch(flabel); // no fallthru to flabel. | 514 if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. |
525 __ bind(&done); | |
526 | 515 |
527 } else { | 516 } else { |
528 PrintF("AssembleArchBranch Unimplemented arch_opcode: %d\n", | 517 PrintF("AssembleArchBranch Unimplemented arch_opcode: %d\n", |
529 instr->arch_opcode()); | 518 instr->arch_opcode()); |
530 UNIMPLEMENTED(); | 519 UNIMPLEMENTED(); |
531 } | 520 } |
532 } | 521 } |
533 | 522 |
534 | 523 |
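Note: the rewritten AssembleArchBranch no longer digs the true/false targets out of the instruction's last two inputs or manages a local done label; it reads everything from the new branch argument. The definition of BranchInfo is not part of this file, but judging from the fields used above (condition, true_label, false_label, fallthru) it is presumably along these lines; this is a sketch inferred from usage, not the actual declaration:

    // Assumed shape of BranchInfo, inferred from its use in this CL.
    struct BranchInfo {
      FlagsCondition condition;  // e.g. kEqual, kOverflow, kUnsignedLessThan
      Label* true_label;         // branch target when the condition holds
      Label* false_label;        // branch target when it does not
      bool fallthru;             // true if false_label is the next block in
                                 // assembly order, so no explicit jump is needed
    };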
| 524 void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) { |
| 525 if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target)); |
| 526 } |
| 527 |
| 528 |
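kArchJmp now goes through the new AssembleArchJump helper, which emits nothing when the target block is next in assembly order. The fallthrough bookkeeping that AssembleArchBranch used to do with its local done label has likewise moved out of this file: the shared code generator presumably builds a BranchInfo from the instruction's last two RPO inputs (exactly what the deleted lines 399-407 computed here) and passes it to the per-architecture hook. A rough sketch of that call site, with hypothetical local names and assuming GetLabel and IsNextInAssemblyOrder keep their current meaning:

    // Sketch of the architecture-independent call site (not in this file).
    BasicBlock::RpoNumber true_rpo =
        i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
    BasicBlock::RpoNumber false_rpo =
        i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
    BranchInfo branch;
    branch.condition = condition;
    branch.true_label = GetLabel(true_rpo);
    branch.false_label = GetLabel(false_rpo);
    branch.fallthru = IsNextInAssemblyOrder(false_rpo);
    AssembleArchBranch(instr, &branch);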
535 // Assembles boolean materializations after an instruction. | 529 // Assembles boolean materializations after an instruction. |
536 void CodeGenerator::AssembleArchBoolean(Instruction* instr, | 530 void CodeGenerator::AssembleArchBoolean(Instruction* instr, |
537 FlagsCondition condition) { | 531 FlagsCondition condition) { |
538 MipsOperandConverter i(this, instr); | 532 MipsOperandConverter i(this, instr); |
539 Label done; | 533 Label done; |
540 | 534 |
541 // Materialize a full 32-bit 1 or 0 value. The result register is always the | 535 // Materialize a full 32-bit 1 or 0 value. The result register is always the |
542 // last output of the instruction. | 536 // last output of the instruction. |
543 Label false_value; | 537 Label false_value; |
544 DCHECK_NE(0, instr->OutputCount()); | 538 DCHECK_NE(0, instr->OutputCount()); |
(...skipping 398 matching lines...)
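For reference, since the body of AssembleArchBoolean is elided above: with no condition flags on MIPS, the materialization typically redoes the comparison as a branch around a constant load into the last output register. A simplified sketch of the kMipsTst / kEqual path only; the real code also handles kMipsCmp, kMipsCmpD and the overflow opcodes, and may fill branch delay slots:

    // Sketch of one materialization path; 'done' and 'false_value' are the
    // labels declared at the top of AssembleArchBoolean.
    Register result =
        i.OutputRegister(static_cast<int>(instr->OutputCount()) - 1);
    __ And(at, i.InputRegister(0), i.InputOperand(1));
    __ li(result, Operand(1));                    // assume the condition holds
    __ Branch(&done, eq, at, Operand(zero_reg));  // (a & b) == 0: keep the 1
    __ bind(&false_value);
    __ mov(result, zero_reg);                     // condition failed: result = 0
    __ bind(&done);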
943 } | 937 } |
944 } | 938 } |
945 MarkLazyDeoptSite(); | 939 MarkLazyDeoptSite(); |
946 } | 940 } |
947 | 941 |
948 #undef __ | 942 #undef __ |
949 | 943 |
950 } // namespace compiler | 944 } // namespace compiler |
951 } // namespace internal | 945 } // namespace internal |
952 } // namespace v8 | 946 } // namespace v8 |