OLD | NEW |
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
6 // met: | 6 // met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 246 matching lines...) |
257 | 257 |
258 trampoline_emitted_ = FLAG_force_long_branches; | 258 trampoline_emitted_ = FLAG_force_long_branches; |
259 unbound_labels_count_ = 0; | 259 unbound_labels_count_ = 0; |
260 block_buffer_growth_ = false; | 260 block_buffer_growth_ = false; |
261 | 261 |
262 ClearRecordedAstId(); | 262 ClearRecordedAstId(); |
263 } | 263 } |
264 | 264 |
265 | 265 |
266 void Assembler::GetCode(CodeDesc* desc) { | 266 void Assembler::GetCode(CodeDesc* desc) { |
| 267 EmitForbiddenSlotInstruction(); |
267 DCHECK(pc_ <= reloc_info_writer.pos()); // No overlap. | 268 DCHECK(pc_ <= reloc_info_writer.pos()); // No overlap. |
268 // Set up code descriptor. | 269 // Set up code descriptor. |
269 desc->buffer = buffer_; | 270 desc->buffer = buffer_; |
270 desc->buffer_size = buffer_size_; | 271 desc->buffer_size = buffer_size_; |
271 desc->instr_size = pc_offset(); | 272 desc->instr_size = pc_offset(); |
272 desc->reloc_size = | 273 desc->reloc_size = |
273 static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos()); | 274 static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos()); |
274 desc->origin = this; | 275 desc->origin = this; |
275 desc->constant_pool_size = 0; | 276 desc->constant_pool_size = 0; |
276 } | 277 } |
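Note: the new EmitForbiddenSlotInstruction() call here (and in Align() below) makes sure a trailing r6 compact branch never gets the code end, or alignment padding, in its forbidden slot. A minimal sketch of what that helper is assumed to look like (its real definition lives in assembler-mips64.h):

    void Assembler::EmitForbiddenSlotInstruction() {
      if (IsPrevInstrCompactBranch()) {
        nop();  // Fill the forbidden slot of the preceding compact branch.
      }
    }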
277 | 278 |
278 | 279 |
279 void Assembler::Align(int m) { | 280 void Assembler::Align(int m) { |
280 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m)); | 281 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m)); |
| 282 EmitForbiddenSlotInstruction(); |
281 while ((pc_offset() & (m - 1)) != 0) { | 283 while ((pc_offset() & (m - 1)) != 0) { |
282 nop(); | 284 nop(); |
283 } | 285 } |
284 } | 286 } |
285 | 287 |
286 | 288 |
287 void Assembler::CodeTargetAlign() { | 289 void Assembler::CodeTargetAlign() { |
288 // No advantage to aligning branch/call targets to more than a | 290 // No advantage to aligning branch/call targets to more than a |
289 // single instruction, as far as I am aware. | 291 // single instruction, as far as I am aware. |
290 Align(4); | 292 Align(4); |
(...skipping 136 matching lines...) |
427 const int kEndOfChain = -4; | 429 const int kEndOfChain = -4; |
428 // Determines the end of the Jump chain (a subset of the label link chain). | 430 // Determines the end of the Jump chain (a subset of the label link chain). |
429 const int kEndOfJumpChain = 0; | 431 const int kEndOfJumpChain = 0; |
430 | 432 |
431 | 433 |
432 bool Assembler::IsBranch(Instr instr) { | 434 bool Assembler::IsBranch(Instr instr) { |
433 uint32_t opcode = GetOpcodeField(instr); | 435 uint32_t opcode = GetOpcodeField(instr); |
434 uint32_t rt_field = GetRtField(instr); | 436 uint32_t rt_field = GetRtField(instr); |
435 uint32_t rs_field = GetRsField(instr); | 437 uint32_t rs_field = GetRsField(instr); |
436 // Checks if the instruction is a branch. | 438 // Checks if the instruction is a branch. |
437 return opcode == BEQ || | 439 bool isBranch = |
438 opcode == BNE || | 440 opcode == BEQ || opcode == BNE || opcode == BLEZ || opcode == BGTZ || |
439 opcode == BLEZ || | 441 opcode == BEQL || opcode == BNEL || opcode == BLEZL || opcode == BGTZL || |
440 opcode == BGTZ || | |
441 opcode == BEQL || | |
442 opcode == BNEL || | |
443 opcode == BLEZL || | |
444 opcode == BGTZL || | |
445 (opcode == REGIMM && (rt_field == BLTZ || rt_field == BGEZ || | 442 (opcode == REGIMM && (rt_field == BLTZ || rt_field == BGEZ || |
446 rt_field == BLTZAL || rt_field == BGEZAL)) || | 443 rt_field == BLTZAL || rt_field == BGEZAL)) || |
447 (opcode == COP1 && rs_field == BC1) || // Coprocessor branch. | 444 (opcode == COP1 && rs_field == BC1) || // Coprocessor branch. |
448 (opcode == COP1 && rs_field == BC1EQZ) || | 445 (opcode == COP1 && rs_field == BC1EQZ) || |
449 (opcode == COP1 && rs_field == BC1NEZ); | 446 (opcode == COP1 && rs_field == BC1NEZ); |
| 447 if (!isBranch && kArchVariant == kMips64r6) { |
| 448 // All three variants of POP10 (BOVC, BEQC, BEQZALC) and |
| 449 // POP30 (BNVC, BNEC, BNEZALC) are branch ops. |
| 450 isBranch |= opcode == POP10 || opcode == POP30 || opcode == BC || |
| 451 opcode == BALC || |
| 452 (opcode == POP66 && rs_field != 0) || // BEQZC |
| 453 (opcode == POP76 && rs_field != 0); // BNEZC |
| 454 } |
| 455 return isBranch; |
450 } | 456 } |
451 | 457 |
452 | 458 |
| 459 bool Assembler::IsBc(Instr instr) { |
| 460 uint32_t opcode = GetOpcodeField(instr); |
| 461 // Checks if the instruction is a BC or BALC. |
| 462 return opcode == BC || opcode == BALC; |
| 463 } |
| 464 |
| 465 |
| 466 bool Assembler::IsBzc(Instr instr) { |
| 467 uint32_t opcode = GetOpcodeField(instr); |
| 468 // Checks if the instruction is BEQZC or BNEZC. |
| 469 return (opcode == POP66 && GetRsField(instr) != 0) || |
| 470 (opcode == POP76 && GetRsField(instr) != 0); |
| 471 } |
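Note: on r6 the POP66/POP76 opcode slots are shared between the compact zero-compare branches and the indexed jumps, which is why the rs-field test above is the discriminator (an assumption about the encoding split, consistent with jic()/jialc() further down):

    // rs == 0  -> JIC / JIALC   (indexed jump, 16-bit immediate)
    // rs != 0  -> BEQZC / BNEZC (compact branch, 21-bit offset)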
| 472 |
| 473 |
453 bool Assembler::IsEmittedConstant(Instr instr) { | 474 bool Assembler::IsEmittedConstant(Instr instr) { |
454 uint32_t label_constant = GetLabelConst(instr); | 475 uint32_t label_constant = GetLabelConst(instr); |
455 return label_constant == 0; // Emitted label const in reg-exp engine. | 476 return label_constant == 0; // Emitted label const in reg-exp engine. |
456 } | 477 } |
457 | 478 |
458 | 479 |
459 bool Assembler::IsBeq(Instr instr) { | 480 bool Assembler::IsBeq(Instr instr) { |
460 return GetOpcodeField(instr) == BEQ; | 481 return GetOpcodeField(instr) == BEQ; |
461 } | 482 } |
462 | 483 |
463 | 484 |
464 bool Assembler::IsBne(Instr instr) { | 485 bool Assembler::IsBne(Instr instr) { |
465 return GetOpcodeField(instr) == BNE; | 486 return GetOpcodeField(instr) == BNE; |
466 } | 487 } |
467 | 488 |
468 | 489 |
| 490 bool Assembler::IsBeqzc(Instr instr) { |
| 491 uint32_t opcode = GetOpcodeField(instr); |
| 492 return opcode == POP66 && GetRsField(instr) != 0; |
| 493 } |
| 494 |
| 495 |
| 496 bool Assembler::IsBnezc(Instr instr) { |
| 497 uint32_t opcode = GetOpcodeField(instr); |
| 498 return opcode == POP76 && GetRsField(instr) != 0; |
| 499 } |
| 500 |
| 501 |
| 502 bool Assembler::IsBeqc(Instr instr) { |
| 503 uint32_t opcode = GetOpcodeField(instr); |
| 504 uint32_t rs = GetRsField(instr); |
| 505 uint32_t rt = GetRtField(instr); |
| 506 return opcode == POP10 && rs != 0 && rs < rt; // && rt != 0 |
| 507 } |
| 508 |
| 509 |
| 510 bool Assembler::IsBnec(Instr instr) { |
| 511 uint32_t opcode = GetOpcodeField(instr); |
| 512 uint32_t rs = GetRsField(instr); |
| 513 uint32_t rt = GetRtField(instr); |
| 514 return opcode == POP30 && rs != 0 && rs < rt; // && rt != 0 |
| 515 } |
| 516 |
| 517 |
469 bool Assembler::IsJump(Instr instr) { | 518 bool Assembler::IsJump(Instr instr) { |
470 uint32_t opcode = GetOpcodeField(instr); | 519 uint32_t opcode = GetOpcodeField(instr); |
471 uint32_t rt_field = GetRtField(instr); | 520 uint32_t rt_field = GetRtField(instr); |
472 uint32_t rd_field = GetRdField(instr); | 521 uint32_t rd_field = GetRdField(instr); |
473 uint32_t function_field = GetFunctionField(instr); | 522 uint32_t function_field = GetFunctionField(instr); |
474 // Checks if the instruction is a jump. | 523 // Checks if the instruction is a jump. |
475 return opcode == J || opcode == JAL || | 524 return opcode == J || opcode == JAL || |
476 (opcode == SPECIAL && rt_field == 0 && | 525 (opcode == SPECIAL && rt_field == 0 && |
477 ((function_field == JALR) || (rd_field == 0 && (function_field == JR)))); | 526 ((function_field == JALR) || (rd_field == 0 && (function_field == JR)))); |
478 } | 527 } |
(...skipping 59 matching lines...) |
538 } | 587 } |
539 | 588 |
540 | 589 |
541 int32_t Assembler::GetBranchOffset(Instr instr) { | 590 int32_t Assembler::GetBranchOffset(Instr instr) { |
542 DCHECK(IsBranch(instr)); | 591 DCHECK(IsBranch(instr)); |
543 return (static_cast<int16_t>(instr & kImm16Mask)) << 2; | 592 return (static_cast<int16_t>(instr & kImm16Mask)) << 2; |
544 } | 593 } |
545 | 594 |
546 | 595 |
547 bool Assembler::IsLw(Instr instr) { | 596 bool Assembler::IsLw(Instr instr) { |
548 return ((instr & kOpcodeMask) == LW); | 597 return (static_cast<uint32_t>(instr & kOpcodeMask) == LW); |
549 } | 598 } |
550 | 599 |
551 | 600 |
552 int16_t Assembler::GetLwOffset(Instr instr) { | 601 int16_t Assembler::GetLwOffset(Instr instr) { |
553 DCHECK(IsLw(instr)); | 602 DCHECK(IsLw(instr)); |
554 return ((instr & kImm16Mask)); | 603 return ((instr & kImm16Mask)); |
555 } | 604 } |
556 | 605 |
557 | 606 |
558 Instr Assembler::SetLwOffset(Instr instr, int16_t offset) { | 607 Instr Assembler::SetLwOffset(Instr instr, int16_t offset) { |
559 DCHECK(IsLw(instr)); | 608 DCHECK(IsLw(instr)); |
560 | 609 |
561 // We actually create a new lw instruction based on the original one. | 610 // We actually create a new lw instruction based on the original one. |
562 Instr temp_instr = LW | (instr & kRsFieldMask) | (instr & kRtFieldMask) | 611 Instr temp_instr = LW | (instr & kRsFieldMask) | (instr & kRtFieldMask) |
563 | (offset & kImm16Mask); | 612 | (offset & kImm16Mask); |
564 | 613 |
565 return temp_instr; | 614 return temp_instr; |
566 } | 615 } |
567 | 616 |
568 | 617 |
569 bool Assembler::IsSw(Instr instr) { | 618 bool Assembler::IsSw(Instr instr) { |
570 return ((instr & kOpcodeMask) == SW); | 619 return (static_cast<uint32_t>(instr & kOpcodeMask) == SW); |
571 } | 620 } |
572 | 621 |
573 | 622 |
574 Instr Assembler::SetSwOffset(Instr instr, int16_t offset) { | 623 Instr Assembler::SetSwOffset(Instr instr, int16_t offset) { |
575 DCHECK(IsSw(instr)); | 624 DCHECK(IsSw(instr)); |
576 return ((instr & ~kImm16Mask) | (offset & kImm16Mask)); | 625 return ((instr & ~kImm16Mask) | (offset & kImm16Mask)); |
577 } | 626 } |
578 | 627 |
579 | 628 |
580 bool Assembler::IsAddImmediate(Instr instr) { | 629 bool Assembler::IsAddImmediate(Instr instr) { |
581 return ((instr & kOpcodeMask) == ADDIU || (instr & kOpcodeMask) == DADDIU); | 630 return ((instr & kOpcodeMask) == ADDIU || (instr & kOpcodeMask) == DADDIU); |
582 } | 631 } |
583 | 632 |
584 | 633 |
585 Instr Assembler::SetAddImmediateOffset(Instr instr, int16_t offset) { | 634 Instr Assembler::SetAddImmediateOffset(Instr instr, int16_t offset) { |
586 DCHECK(IsAddImmediate(instr)); | 635 DCHECK(IsAddImmediate(instr)); |
587 return ((instr & ~kImm16Mask) | (offset & kImm16Mask)); | 636 return ((instr & ~kImm16Mask) | (offset & kImm16Mask)); |
588 } | 637 } |
589 | 638 |
590 | 639 |
591 bool Assembler::IsAndImmediate(Instr instr) { | 640 bool Assembler::IsAndImmediate(Instr instr) { |
592 return GetOpcodeField(instr) == ANDI; | 641 return GetOpcodeField(instr) == ANDI; |
593 } | 642 } |
594 | 643 |
595 | 644 |
| 645 static Assembler::OffsetSize OffsetSizeInBits(Instr instr) { |
| 646 if (kArchVariant == kMips64r6) { |
| 647 if (Assembler::IsBc(instr)) { |
| 648 return Assembler::OffsetSize::kOffset26; |
| 649 } else if (Assembler::IsBzc(instr)) { |
| 650 return Assembler::OffsetSize::kOffset21; |
| 651 } |
| 652 } |
| 653 return Assembler::OffsetSize::kOffset16; |
| 654 } |
| 655 |
| 656 |
| 657 static inline int32_t AddBranchOffset(int pos, Instr instr) { |
| 658 int bits = OffsetSizeInBits(instr); |
| 659 const int32_t mask = (1 << bits) - 1; |
| 660 bits = 32 - bits; |
| 661 |
| 662 // Do NOT change this to <<2. We rely on arithmetic shifts here, assuming |
| 663 // the compiler uses arithmetic shifts for signed integers. |
| 664 int32_t imm = ((instr & mask) << bits) >> (bits - 2); |
| 665 |
| 666 if (imm == kEndOfChain) { |
| 667 // EndOfChain sentinel is returned directly, not relative to pc or pos. |
| 668 return kEndOfChain; |
| 669 } else { |
| 670 return pos + Assembler::kBranchPCOffset + imm; |
| 671 } |
| 672 } |
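Note: the combined shift both masks off everything above the offset field and sign-extends it, and the ">> (bits - 2)" folds in the *4 byte scaling. A tiny worked example for the 16-bit case, relying on the same arithmetic-right-shift assumption the comment states:

    int32_t field = 0xFFFC;                   // low 16 bits of the instruction, i.e. -4 words
    int32_t imm = (field << 16) >> (16 - 2);  // == -16 bytes after sign extension and scaling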
| 673 |
| 674 |
596 int Assembler::target_at(int pos, bool is_internal) { | 675 int Assembler::target_at(int pos, bool is_internal) { |
597 if (is_internal) { | 676 if (is_internal) { |
598 int64_t* p = reinterpret_cast<int64_t*>(buffer_ + pos); | 677 int64_t* p = reinterpret_cast<int64_t*>(buffer_ + pos); |
599 int64_t address = *p; | 678 int64_t address = *p; |
600 if (address == kEndOfJumpChain) { | 679 if (address == kEndOfJumpChain) { |
601 return kEndOfChain; | 680 return kEndOfChain; |
602 } else { | 681 } else { |
603 int64_t instr_address = reinterpret_cast<int64_t>(p); | 682 int64_t instr_address = reinterpret_cast<int64_t>(p); |
604 DCHECK(instr_address - address < INT_MAX); | 683 DCHECK(instr_address - address < INT_MAX); |
605 int delta = static_cast<int>(instr_address - address); | 684 int delta = static_cast<int>(instr_address - address); |
606 DCHECK(pos > delta); | 685 DCHECK(pos > delta); |
607 return pos - delta; | 686 return pos - delta; |
608 } | 687 } |
609 } | 688 } |
610 Instr instr = instr_at(pos); | 689 Instr instr = instr_at(pos); |
611 if ((instr & ~kImm16Mask) == 0) { | 690 if ((instr & ~kImm16Mask) == 0) { |
612 // Emitted label constant, not part of a branch. | 691 // Emitted label constant, not part of a branch. |
613 if (instr == 0) { | 692 if (instr == 0) { |
614 return kEndOfChain; | 693 return kEndOfChain; |
615 } else { | 694 } else { |
616 int32_t imm18 =((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14; | 695 int32_t imm18 =((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14; |
617 return (imm18 + pos); | 696 return (imm18 + pos); |
618 } | 697 } |
619 } | 698 } |
620 // Check we have a branch or jump instruction. | 699 // Check we have a branch or jump instruction. |
621 DCHECK(IsBranch(instr) || IsJ(instr) || IsJal(instr) || IsLui(instr)); | 700 DCHECK(IsBranch(instr) || IsJ(instr) || IsJal(instr) || IsLui(instr)); |
622 // Do NOT change this to <<2. We rely on arithmetic shifts here, assuming | 701 // Do NOT change this to <<2. We rely on arithmetic shifts here, assuming |
623 // the compiler uses arithmetic shifts for signed integers. | 702 // the compiler uses arithmetic shifts for signed integers. |
624 if (IsBranch(instr)) { | 703 if (IsBranch(instr)) { |
625 int32_t imm18 = ((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14; | 704 return AddBranchOffset(pos, instr); |
626 if (imm18 == kEndOfChain) { | |
627 // EndOfChain sentinel is returned directly, not relative to pc or pos. | |
628 return kEndOfChain; | |
629 } else { | |
630 return pos + kBranchPCOffset + imm18; | |
631 } | |
632 } else if (IsLui(instr)) { | 705 } else if (IsLui(instr)) { |
633 Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize); | 706 Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize); |
634 Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize); | 707 Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize); |
635 Instr instr_ori2 = instr_at(pos + 3 * Assembler::kInstrSize); | 708 Instr instr_ori2 = instr_at(pos + 3 * Assembler::kInstrSize); |
636 DCHECK(IsOri(instr_ori)); | 709 DCHECK(IsOri(instr_ori)); |
637 DCHECK(IsOri(instr_ori2)); | 710 DCHECK(IsOri(instr_ori2)); |
638 | 711 |
639 // TODO(plind) create named constants for shift values. | 712 // TODO(plind) create named constants for shift values. |
640 int64_t imm = static_cast<int64_t>(instr_lui & kImm16Mask) << 48; | 713 int64_t imm = static_cast<int64_t>(instr_lui & kImm16Mask) << 48; |
641 imm |= static_cast<int64_t>(instr_ori & kImm16Mask) << 32; | 714 imm |= static_cast<int64_t>(instr_ori & kImm16Mask) << 32; |
(...skipping 19 matching lines...) |
661 return kEndOfChain; | 734 return kEndOfChain; |
662 } else { | 735 } else { |
663 // Sign extend 28-bit offset. | 736 // Sign extend 28-bit offset. |
664 int32_t delta = static_cast<int32_t>((imm28 << 4) >> 4); | 737 int32_t delta = static_cast<int32_t>((imm28 << 4) >> 4); |
665 return pos + delta; | 738 return pos + delta; |
666 } | 739 } |
667 } | 740 } |
668 } | 741 } |
669 | 742 |
670 | 743 |
| 744 static inline Instr SetBranchOffset(int32_t pos, int32_t target_pos, |
| 745 Instr instr) { |
| 746 int32_t bits = OffsetSizeInBits(instr); |
| 747 int32_t imm = target_pos - (pos + Assembler::kBranchPCOffset); |
| 748 DCHECK((imm & 3) == 0); |
| 749 imm >>= 2; |
| 750 |
| 751 const int32_t mask = (1 << bits) - 1; |
| 752 instr &= ~mask; |
| 753 DCHECK(is_intn(imm, bits)); |
| 754 |
| 755 return instr | (imm & mask); |
| 756 } |
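Note: SetBranchOffset() is the inverse of AddBranchOffset() for word-aligned, in-range targets, so a sanity check along these lines would hold (sketch only, under that assumption):

    Instr patched = SetBranchOffset(pos, target_pos, instr);
    DCHECK(AddBranchOffset(pos, patched) == target_pos);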
| 757 |
| 758 |
671 void Assembler::target_at_put(int pos, int target_pos, bool is_internal) { | 759 void Assembler::target_at_put(int pos, int target_pos, bool is_internal) { |
672 if (is_internal) { | 760 if (is_internal) { |
673 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; | 761 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
674 *reinterpret_cast<uint64_t*>(buffer_ + pos) = imm; | 762 *reinterpret_cast<uint64_t*>(buffer_ + pos) = imm; |
675 return; | 763 return; |
676 } | 764 } |
677 Instr instr = instr_at(pos); | 765 Instr instr = instr_at(pos); |
678 if ((instr & ~kImm16Mask) == 0) { | 766 if ((instr & ~kImm16Mask) == 0) { |
679 DCHECK(target_pos == kEndOfChain || target_pos >= 0); | 767 DCHECK(target_pos == kEndOfChain || target_pos >= 0); |
680 // Emitted label constant, not part of a branch. | 768 // Emitted label constant, not part of a branch. |
681 // Make label relative to Code* of generated Code object. | 769 // Make label relative to Code* of generated Code object. |
682 instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag)); | 770 instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag)); |
683 return; | 771 return; |
684 } | 772 } |
685 | 773 |
686 if (IsBranch(instr)) { | 774 if (IsBranch(instr)) { |
687 int32_t imm18 = target_pos - (pos + kBranchPCOffset); | 775 instr = SetBranchOffset(pos, target_pos, instr); |
688 DCHECK((imm18 & 3) == 0); | 776 instr_at_put(pos, instr); |
689 | |
690 instr &= ~kImm16Mask; | |
691 int32_t imm16 = imm18 >> 2; | |
692 DCHECK(is_int16(imm16)); | |
693 | |
694 instr_at_put(pos, instr | (imm16 & kImm16Mask)); | |
695 } else if (IsLui(instr)) { | 777 } else if (IsLui(instr)) { |
696 Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize); | 778 Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize); |
697 Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize); | 779 Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize); |
698 Instr instr_ori2 = instr_at(pos + 3 * Assembler::kInstrSize); | 780 Instr instr_ori2 = instr_at(pos + 3 * Assembler::kInstrSize); |
699 DCHECK(IsOri(instr_ori)); | 781 DCHECK(IsOri(instr_ori)); |
700 DCHECK(IsOri(instr_ori2)); | 782 DCHECK(IsOri(instr_ori2)); |
701 | 783 |
702 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; | 784 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
703 DCHECK((imm & 3) == 0); | 785 DCHECK((imm & 3) == 0); |
704 | 786 |
(...skipping 68 matching lines...) |
773 while (L->is_linked()) { | 855 while (L->is_linked()) { |
774 int fixup_pos = L->pos(); | 856 int fixup_pos = L->pos(); |
775 int dist = pos - fixup_pos; | 857 int dist = pos - fixup_pos; |
776 is_internal = internal_reference_positions_.find(fixup_pos) != | 858 is_internal = internal_reference_positions_.find(fixup_pos) != |
777 internal_reference_positions_.end(); | 859 internal_reference_positions_.end(); |
778 next(L, is_internal); // Call next before overwriting link with target at | 860 next(L, is_internal); // Call next before overwriting link with target at |
779 // fixup_pos. | 861 // fixup_pos. |
780 Instr instr = instr_at(fixup_pos); | 862 Instr instr = instr_at(fixup_pos); |
781 if (is_internal) { | 863 if (is_internal) { |
782 target_at_put(fixup_pos, pos, is_internal); | 864 target_at_put(fixup_pos, pos, is_internal); |
783 } else if (IsBranch(instr)) { | 865 } else { |
784 if (dist > kMaxBranchOffset) { | 866 if (IsBranch(instr)) { |
785 if (trampoline_pos == kInvalidSlotPos) { | 867 int branch_offset = BranchOffset(instr); |
786 trampoline_pos = get_trampoline_entry(fixup_pos); | 868 if (dist > branch_offset) { |
787 CHECK(trampoline_pos != kInvalidSlotPos); | 869 if (trampoline_pos == kInvalidSlotPos) { |
| 870 trampoline_pos = get_trampoline_entry(fixup_pos); |
| 871 CHECK(trampoline_pos != kInvalidSlotPos); |
| 872 } |
| 873 CHECK((trampoline_pos - fixup_pos) <= branch_offset); |
| 874 target_at_put(fixup_pos, trampoline_pos, false); |
| 875 fixup_pos = trampoline_pos; |
| 876 dist = pos - fixup_pos; |
788 } | 877 } |
789 CHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset); | 878 target_at_put(fixup_pos, pos, false); |
790 target_at_put(fixup_pos, trampoline_pos, false); | 879 } else { |
791 fixup_pos = trampoline_pos; | 880 DCHECK(IsJ(instr) || IsJal(instr) || IsLui(instr) || |
792 dist = pos - fixup_pos; | 881 IsEmittedConstant(instr)); |
| 882 target_at_put(fixup_pos, pos, false); |
793 } | 883 } |
794 target_at_put(fixup_pos, pos, false); | |
795 } else { | |
796 DCHECK(IsJ(instr) || IsJal(instr) || IsLui(instr) || | |
797 IsEmittedConstant(instr)); | |
798 target_at_put(fixup_pos, pos, false); | |
799 } | 884 } |
800 } | 885 } |
801 L->bind_to(pos); | 886 L->bind_to(pos); |
802 | 887 |
803 // Keep track of the last bound label so we don't eliminate any instructions | 888 // Keep track of the last bound label so we don't eliminate any instructions |
804 // before a bound label. | 889 // before a bound label. |
805 if (pos > last_bound_pos_) | 890 if (pos > last_bound_pos_) |
806 last_bound_pos_ = pos; | 891 last_bound_pos_ = pos; |
807 } | 892 } |
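Note: the loop above now uses the per-instruction limit from BranchOffset() instead of a fixed kMaxBranchOffset when deciding whether to go through a trampoline. Conceptually the redirect for an out-of-range forward branch is:

    // branch at fixup_pos --(short, <= branch_offset)--> trampoline slot --(long jump)--> pos
    // The first target_at_put() points the branch at the trampoline slot,
    // the second one patches the trampoline slot itself to reach the label.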
808 | 893 |
(...skipping 10 matching lines...) |
819 if (link == kEndOfChain) { | 904 if (link == kEndOfChain) { |
820 L->Unuse(); | 905 L->Unuse(); |
821 } else { | 906 } else { |
822 DCHECK(link >= 0); | 907 DCHECK(link >= 0); |
823 L->link_to(link); | 908 L->link_to(link); |
824 } | 909 } |
825 } | 910 } |
826 | 911 |
827 | 912 |
828 bool Assembler::is_near(Label* L) { | 913 bool Assembler::is_near(Label* L) { |
829 if (L->is_bound()) { | 914 DCHECK(L->is_bound()); |
830 return ((pc_offset() - L->pos()) < kMaxBranchOffset - 4 * kInstrSize); | 915 return pc_offset() - L->pos() < kMaxBranchOffset - 4 * kInstrSize; |
831 } | |
832 return false; | |
833 } | 916 } |
834 | 917 |
835 | 918 |
| 919 bool Assembler::is_near(Label* L, OffsetSize bits) { |
| 920 if (L == nullptr || !L->is_bound()) return true; |
| 921 return ((pc_offset() - L->pos()) < |
| 922 (1 << (bits + 2 - 1)) - 1 - 5 * kInstrSize); |
| 923 } |
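Note: the bound here is the same (1 << (bits + 2 - 1)) - 1 limit that BranchOffset() computes below, minus 5 * kInstrSize of slack, presumably to leave room for the handful of instructions a branch sequence may still emit before the branch itself. For kOffset16 that works out to:

    // (1 << 17) - 1 - 5 * 4 == 131051 bytes back to a bound label still counted as near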
| 924 |
| 925 |
| 926 bool Assembler::is_near_branch(Label* L) { |
| 927 DCHECK(L->is_bound()); |
| 928 return kArchVariant == kMips64r6 ? is_near_r6(L) : is_near_pre_r6(L); |
| 929 } |
| 930 |
| 931 |
| 932 int Assembler::BranchOffset(Instr instr) { |
| 933 // On pre-R6, and for R6 branches other than those below, the offset is 16 bits. |
| 934 int bits = OffsetSize::kOffset16; |
| 935 |
| 936 if (kArchVariant == kMips64r6) { |
| 937 uint32_t opcode = GetOpcodeField(instr); |
| 938 switch (opcode) { |
| 939 // Checks BC or BALC. |
| 940 case BC: |
| 941 case BALC: |
| 942 bits = OffsetSize::kOffset26; |
| 943 break; |
| 944 |
| 945 // Checks BEQZC or BNEZC. |
| 946 case POP66: |
| 947 case POP76: |
| 948 if (GetRsField(instr) != 0) bits = OffsetSize::kOffset21; |
| 949 break; |
| 950 default: |
| 951 break; |
| 952 } |
| 953 } |
| 954 |
| 955 return (1 << (bits + 2 - 1)) - 1; |
| 956 } |
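Note: the returned value is the maximum reachable byte offset for the instruction's encoding (the offset field is a signed word count), i.e.:

    // kOffset16 -> (1 << 17) - 1 == 131071    (~128 KB)
    // kOffset21 -> (1 << 22) - 1 == 4194303   (~4 MB)
    // kOffset26 -> (1 << 27) - 1 == 134217727 (~128 MB)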
| 957 |
| 958 |
836 // We have to use a temporary register for things that can be relocated even | 959 // We have to use a temporary register for things that can be relocated even |
837 // if they can be encoded in the MIPS's 16 bits of immediate-offset instruction | 960 // if they can be encoded in the MIPS's 16 bits of immediate-offset instruction |
838 // space. There is no guarantee that the relocated location can be similarly | 961 // space. There is no guarantee that the relocated location can be similarly |
839 // encoded. | 962 // encoded. |
840 bool Assembler::MustUseReg(RelocInfo::Mode rmode) { | 963 bool Assembler::MustUseReg(RelocInfo::Mode rmode) { |
841 return !RelocInfo::IsNone(rmode); | 964 return !RelocInfo::IsNone(rmode); |
842 } | 965 } |
843 | 966 |
844 void Assembler::GenInstrRegister(Opcode opcode, | 967 void Assembler::GenInstrRegister(Opcode opcode, |
845 Register rs, | 968 Register rs, |
(...skipping 67 matching lines...) |
913 SecondaryField func) { | 1036 SecondaryField func) { |
914 DCHECK(fs.is_valid() && rt.is_valid()); | 1037 DCHECK(fs.is_valid() && rt.is_valid()); |
915 Instr instr = | 1038 Instr instr = |
916 opcode | fmt | (rt.code() << kRtShift) | (fs.code() << kFsShift) | func; | 1039 opcode | fmt | (rt.code() << kRtShift) | (fs.code() << kFsShift) | func; |
917 emit(instr); | 1040 emit(instr); |
918 } | 1041 } |
919 | 1042 |
920 | 1043 |
921 // Instructions with immediate value. | 1044 // Instructions with immediate value. |
922 // Registers are in the order of the instruction encoding, from left to right. | 1045 // Registers are in the order of the instruction encoding, from left to right. |
923 void Assembler::GenInstrImmediate(Opcode opcode, | 1046 void Assembler::GenInstrImmediate(Opcode opcode, Register rs, Register rt, |
924 Register rs, | 1047 int32_t j, |
925 Register rt, | 1048 CompactBranchType is_compact_branch) { |
926 int32_t j) { | |
927 DCHECK(rs.is_valid() && rt.is_valid() && (is_int16(j) || is_uint16(j))); | 1049 DCHECK(rs.is_valid() && rt.is_valid() && (is_int16(j) || is_uint16(j))); |
928 Instr instr = opcode | (rs.code() << kRsShift) | (rt.code() << kRtShift) | 1050 Instr instr = opcode | (rs.code() << kRsShift) | (rt.code() << kRtShift) |
929 | (j & kImm16Mask); | 1051 | (j & kImm16Mask); |
| 1052 emit(instr, is_compact_branch); |
| 1053 } |
| 1054 |
| 1055 |
| 1056 void Assembler::GenInstrImmediate(Opcode opcode, Register rs, SecondaryField SF, |
| 1057 int32_t j, |
| 1058 CompactBranchType is_compact_branch) { |
| 1059 DCHECK(rs.is_valid() && (is_int16(j) || is_uint16(j))); |
| 1060 Instr instr = opcode | (rs.code() << kRsShift) | SF | (j & kImm16Mask); |
| 1061 emit(instr, is_compact_branch); |
| 1062 } |
| 1063 |
| 1064 |
| 1065 void Assembler::GenInstrImmediate(Opcode opcode, Register rs, FPURegister ft, |
| 1066 int32_t j, |
| 1067 CompactBranchType is_compact_branch) { |
| 1068 DCHECK(rs.is_valid() && ft.is_valid() && (is_int16(j) || is_uint16(j))); |
| 1069 Instr instr = opcode | (rs.code() << kRsShift) | (ft.code() << kFtShift) |
| 1070 | (j & kImm16Mask); |
| 1071 emit(instr, is_compact_branch); |
| 1072 } |
| 1073 |
| 1074 |
| 1075 void Assembler::GenInstrImmediate(Opcode opcode, Register rs, int32_t offset21, |
| 1076 CompactBranchType is_compact_branch) { |
| 1077 DCHECK(rs.is_valid() && (is_int21(offset21))); |
| 1078 Instr instr = opcode | (rs.code() << kRsShift) | (offset21 & kImm21Mask); |
| 1079 emit(instr, is_compact_branch); |
| 1080 } |
| 1081 |
| 1082 |
| 1083 void Assembler::GenInstrImmediate(Opcode opcode, Register rs, |
| 1084 uint32_t offset21) { |
| 1085 DCHECK(rs.is_valid() && (is_uint21(offset21))); |
| 1086 Instr instr = opcode | (rs.code() << kRsShift) | (offset21 & kImm21Mask); |
930 emit(instr); | 1087 emit(instr); |
931 } | 1088 } |
932 | 1089 |
933 | 1090 |
934 void Assembler::GenInstrImmediate(Opcode opcode, | 1091 void Assembler::GenInstrImmediate(Opcode opcode, int32_t offset26, |
935 Register rs, | 1092 CompactBranchType is_compact_branch) { |
936 SecondaryField SF, | 1093 DCHECK(is_int26(offset26)); |
937 int32_t j) { | 1094 Instr instr = opcode | (offset26 & kImm26Mask); |
938 DCHECK(rs.is_valid() && (is_int16(j) || is_uint16(j))); | 1095 emit(instr, is_compact_branch); |
939 Instr instr = opcode | (rs.code() << kRsShift) | SF | (j & kImm16Mask); | |
940 emit(instr); | |
941 } | 1096 } |
942 | 1097 |
943 | 1098 |
944 void Assembler::GenInstrImmediate(Opcode opcode, | |
945 Register rs, | |
946 FPURegister ft, | |
947 int32_t j) { | |
948 DCHECK(rs.is_valid() && ft.is_valid() && (is_int16(j) || is_uint16(j))); | |
949 Instr instr = opcode | (rs.code() << kRsShift) | (ft.code() << kFtShift) | |
950 | (j & kImm16Mask); | |
951 emit(instr); | |
952 } | |
953 | |
954 | |
955 void Assembler::GenInstrImmediate(Opcode opcode, Register rs, int32_t j) { | |
956 DCHECK(rs.is_valid() && (is_uint21(j))); | |
957 Instr instr = opcode | (rs.code() << kRsShift) | (j & kImm21Mask); | |
958 emit(instr); | |
959 } | |
960 | |
961 | |
962 void Assembler::GenInstrImmediate(Opcode opcode, int32_t offset26) { | |
963 DCHECK(is_int26(offset26)); | |
964 Instr instr = opcode | (offset26 & kImm26Mask); | |
965 emit(instr); | |
966 } | |
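Note: with the added CompactBranchType parameter (presumably defaulting to a no-compact-branch value in the header), existing delay-slot call sites stay as they are and only the r6 compact branches opt in, e.g.:

    GenInstrImmediate(BEQ, rs, rt, offset);                  // delay-slot branch, unchanged
    GenInstrImmediate(BLEZL, rt, rt, offset,
                      CompactBranchType::COMPACT_BRANCH);    // compact branch (bgezc)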
967 | |
968 | |
969 void Assembler::GenInstrJump(Opcode opcode, | 1099 void Assembler::GenInstrJump(Opcode opcode, |
970 uint32_t address) { | 1100 uint32_t address) { |
971 BlockTrampolinePoolScope block_trampoline_pool(this); | 1101 BlockTrampolinePoolScope block_trampoline_pool(this); |
972 DCHECK(is_uint26(address)); | 1102 DCHECK(is_uint26(address)); |
973 Instr instr = opcode | address; | 1103 Instr instr = opcode | address; |
974 emit(instr); | 1104 emit(instr); |
975 BlockTrampolinePoolFor(1); // For associated delay slot. | 1105 BlockTrampolinePoolFor(1); // For associated delay slot. |
976 } | 1106 } |
977 | 1107 |
978 | 1108 |
(...skipping 28 matching lines...) |
1007 } | 1137 } |
1008 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; | 1138 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
1009 DCHECK((imm & 3) == 0); | 1139 DCHECK((imm & 3) == 0); |
1010 | 1140 |
1011 return imm; | 1141 return imm; |
1012 } | 1142 } |
1013 | 1143 |
1014 | 1144 |
1015 uint64_t Assembler::jump_offset(Label* L) { | 1145 uint64_t Assembler::jump_offset(Label* L) { |
1016 int64_t target_pos; | 1146 int64_t target_pos; |
| 1147 int32_t pad = IsPrevInstrCompactBranch() ? kInstrSize : 0; |
| 1148 |
1017 if (L->is_bound()) { | 1149 if (L->is_bound()) { |
1018 target_pos = L->pos(); | 1150 target_pos = L->pos(); |
1019 } else { | 1151 } else { |
1020 if (L->is_linked()) { | 1152 if (L->is_linked()) { |
1021 target_pos = L->pos(); // L's link. | 1153 target_pos = L->pos(); // L's link. |
1022 L->link_to(pc_offset()); | 1154 L->link_to(pc_offset() + pad); |
1023 } else { | 1155 } else { |
1024 L->link_to(pc_offset()); | 1156 L->link_to(pc_offset() + pad); |
1025 return kEndOfJumpChain; | 1157 return kEndOfJumpChain; |
1026 } | 1158 } |
1027 } | 1159 } |
1028 int64_t imm = target_pos - pc_offset(); | 1160 int64_t imm = target_pos - (pc_offset() + pad); |
1029 DCHECK((imm & 3) == 0); | 1161 DCHECK((imm & 3) == 0); |
1030 | 1162 |
1031 return static_cast<uint64_t>(imm); | 1163 return static_cast<uint64_t>(imm); |
1032 } | 1164 } |
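Note: the new pad term accounts for the forbidden slot: if the previous instruction was a compact branch, a nop is expected to be emitted before this jump, so both the recorded link position and the computed offset shift by one instruction:

    // prev. compact branch -> [nop in forbidden slot] -> jump lands at pc_offset() + kInstrSize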
1033 | 1165 |
1034 | 1166 |
1035 int32_t Assembler::branch_offset(Label* L, bool jump_elimination_allowed) { | 1167 int32_t Assembler::branch_offset_helper(Label* L, OffsetSize bits) { |
1036 int32_t target_pos; | 1168 int32_t target_pos; |
| 1169 int32_t pad = IsPrevInstrCompactBranch() ? kInstrSize : 0; |
| 1170 |
1037 if (L->is_bound()) { | 1171 if (L->is_bound()) { |
1038 target_pos = L->pos(); | 1172 target_pos = L->pos(); |
1039 } else { | 1173 } else { |
1040 if (L->is_linked()) { | 1174 if (L->is_linked()) { |
1041 target_pos = L->pos(); | 1175 target_pos = L->pos(); |
1042 L->link_to(pc_offset()); | 1176 L->link_to(pc_offset() + pad); |
1043 } else { | 1177 } else { |
1044 L->link_to(pc_offset()); | 1178 L->link_to(pc_offset() + pad); |
1045 if (!trampoline_emitted_) { | 1179 if (!trampoline_emitted_) { |
1046 unbound_labels_count_++; | 1180 unbound_labels_count_++; |
1047 next_buffer_check_ -= kTrampolineSlotsSize; | 1181 next_buffer_check_ -= kTrampolineSlotsSize; |
1048 } | |
1049 return kEndOfChain; | |
1050 } | |
1051 } | |
1052 | |
1053 int32_t offset = target_pos - (pc_offset() + kBranchPCOffset); | |
1054 DCHECK((offset & 3) == 0); | |
1055 DCHECK(is_int16(offset >> 2)); | |
1056 | |
1057 return offset; | |
1058 } | |
1059 | |
1060 | |
1061 int32_t Assembler::branch_offset_compact(Label* L, | |
1062 bool jump_elimination_allowed) { | |
1063 int32_t target_pos; | |
1064 if (L->is_bound()) { | |
1065 target_pos = L->pos(); | |
1066 } else { | |
1067 if (L->is_linked()) { | |
1068 target_pos = L->pos(); | |
1069 L->link_to(pc_offset()); | |
1070 } else { | |
1071 L->link_to(pc_offset()); | |
1072 if (!trampoline_emitted_) { | |
1073 unbound_labels_count_++; | |
1074 next_buffer_check_ -= kTrampolineSlotsSize; | |
1075 } | 1182 } |
1076 return kEndOfChain; | 1183 return kEndOfChain; |
1077 } | 1184 } |
1078 } | 1185 } |
1079 | 1186 |
1080 int32_t offset = target_pos - pc_offset(); | 1187 int32_t offset = target_pos - (pc_offset() + kBranchPCOffset + pad); |
| 1188 DCHECK(is_intn(offset, bits + 2)); |
1081 DCHECK((offset & 3) == 0); | 1189 DCHECK((offset & 3) == 0); |
1082 DCHECK(is_int16(offset >> 2)); | |
1083 | 1190 |
1084 return offset; | 1191 return offset; |
1085 } | 1192 } |
1086 | |
1087 | |
1088 int32_t Assembler::branch_offset21(Label* L, bool jump_elimination_allowed) { | |
1089 int32_t target_pos; | |
1090 if (L->is_bound()) { | |
1091 target_pos = L->pos(); | |
1092 } else { | |
1093 if (L->is_linked()) { | |
1094 target_pos = L->pos(); | |
1095 L->link_to(pc_offset()); | |
1096 } else { | |
1097 L->link_to(pc_offset()); | |
1098 if (!trampoline_emitted_) { | |
1099 unbound_labels_count_++; | |
1100 next_buffer_check_ -= kTrampolineSlotsSize; | |
1101 } | |
1102 return kEndOfChain; | |
1103 } | |
1104 } | |
1105 | |
1106 int32_t offset = target_pos - (pc_offset() + kBranchPCOffset); | |
1107 DCHECK((offset & 3) == 0); | |
1108 DCHECK(((offset >> 2) & 0xFFE00000) == 0); // Offset is 21bit width. | |
1109 | |
1110 return offset; | |
1111 } | |
1112 | |
1113 | |
1114 int32_t Assembler::branch_offset21_compact(Label* L, | |
1115 bool jump_elimination_allowed) { | |
1116 int32_t target_pos; | |
1117 if (L->is_bound()) { | |
1118 target_pos = L->pos(); | |
1119 } else { | |
1120 if (L->is_linked()) { | |
1121 target_pos = L->pos(); | |
1122 L->link_to(pc_offset()); | |
1123 } else { | |
1124 L->link_to(pc_offset()); | |
1125 if (!trampoline_emitted_) { | |
1126 unbound_labels_count_++; | |
1127 next_buffer_check_ -= kTrampolineSlotsSize; | |
1128 } | |
1129 return kEndOfChain; | |
1130 } | |
1131 } | |
1132 | |
1133 int32_t offset = target_pos - (pc_offset() + kBranchPCOffset); | |
1134 DCHECK((offset & 3) == 0); | |
1135 DCHECK(((offset >> 2) & 0xFFE00000) == 0); // Offset is 21bit width. | |
1136 | |
1137 return offset; | |
1138 } | |
1139 | 1193 |
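Note: the four old branch_offset* variants collapse into branch_offset_helper(), parameterized by OffsetSize and sharing the same forbidden-slot pad as jump_offset(). The per-width entry points are presumably thin inline wrappers in assembler-mips64.h, along the lines of:

    int32_t branch_offset(Label* L)   { return branch_offset_helper(L, OffsetSize::kOffset16); }
    int32_t branch_offset21(Label* L) { return branch_offset_helper(L, OffsetSize::kOffset21); }
    int32_t branch_offset26(Label* L) { return branch_offset_helper(L, OffsetSize::kOffset26); }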
1140 | 1194 |
1141 void Assembler::label_at_put(Label* L, int at_offset) { | 1195 void Assembler::label_at_put(Label* L, int at_offset) { |
1142 int target_pos; | 1196 int target_pos; |
1143 if (L->is_bound()) { | 1197 if (L->is_bound()) { |
1144 target_pos = L->pos(); | 1198 target_pos = L->pos(); |
1145 instr_at_put(at_offset, target_pos + (Code::kHeaderSize - kHeapObjectTag)); | 1199 instr_at_put(at_offset, target_pos + (Code::kHeaderSize - kHeapObjectTag)); |
1146 } else { | 1200 } else { |
1147 if (L->is_linked()) { | 1201 if (L->is_linked()) { |
1148 target_pos = L->pos(); // L's link. | 1202 target_pos = L->pos(); // L's link. |
(...skipping 23 matching lines...) Expand all Loading... |
1172 | 1226 |
1173 | 1227 |
1174 void Assembler::bal(int16_t offset) { | 1228 void Assembler::bal(int16_t offset) { |
1175 positions_recorder()->WriteRecordedPositions(); | 1229 positions_recorder()->WriteRecordedPositions(); |
1176 bgezal(zero_reg, offset); | 1230 bgezal(zero_reg, offset); |
1177 } | 1231 } |
1178 | 1232 |
1179 | 1233 |
1180 void Assembler::bc(int32_t offset) { | 1234 void Assembler::bc(int32_t offset) { |
1181 DCHECK(kArchVariant == kMips64r6); | 1235 DCHECK(kArchVariant == kMips64r6); |
1182 GenInstrImmediate(BC, offset); | 1236 GenInstrImmediate(BC, offset, CompactBranchType::COMPACT_BRANCH); |
1183 } | 1237 } |
1184 | 1238 |
1185 | 1239 |
1186 void Assembler::balc(int32_t offset) { | 1240 void Assembler::balc(int32_t offset) { |
1187 DCHECK(kArchVariant == kMips64r6); | 1241 DCHECK(kArchVariant == kMips64r6); |
1188 positions_recorder()->WriteRecordedPositions(); | 1242 positions_recorder()->WriteRecordedPositions(); |
1189 GenInstrImmediate(BALC, offset); | 1243 GenInstrImmediate(BALC, offset, CompactBranchType::COMPACT_BRANCH); |
1190 } | 1244 } |
1191 | 1245 |
1192 | 1246 |
1193 void Assembler::beq(Register rs, Register rt, int16_t offset) { | 1247 void Assembler::beq(Register rs, Register rt, int16_t offset) { |
1194 BlockTrampolinePoolScope block_trampoline_pool(this); | 1248 BlockTrampolinePoolScope block_trampoline_pool(this); |
1195 GenInstrImmediate(BEQ, rs, rt, offset); | 1249 GenInstrImmediate(BEQ, rs, rt, offset); |
1196 BlockTrampolinePoolFor(1); // For associated delay slot. | 1250 BlockTrampolinePoolFor(1); // For associated delay slot. |
1197 } | 1251 } |
1198 | 1252 |
1199 | 1253 |
1200 void Assembler::bgez(Register rs, int16_t offset) { | 1254 void Assembler::bgez(Register rs, int16_t offset) { |
1201 BlockTrampolinePoolScope block_trampoline_pool(this); | 1255 BlockTrampolinePoolScope block_trampoline_pool(this); |
1202 GenInstrImmediate(REGIMM, rs, BGEZ, offset); | 1256 GenInstrImmediate(REGIMM, rs, BGEZ, offset); |
1203 BlockTrampolinePoolFor(1); // For associated delay slot. | 1257 BlockTrampolinePoolFor(1); // For associated delay slot. |
1204 } | 1258 } |
1205 | 1259 |
1206 | 1260 |
1207 void Assembler::bgezc(Register rt, int16_t offset) { | 1261 void Assembler::bgezc(Register rt, int16_t offset) { |
1208 DCHECK(kArchVariant == kMips64r6); | 1262 DCHECK(kArchVariant == kMips64r6); |
1209 DCHECK(!(rt.is(zero_reg))); | 1263 DCHECK(!(rt.is(zero_reg))); |
1210 GenInstrImmediate(BLEZL, rt, rt, offset); | 1264 GenInstrImmediate(BLEZL, rt, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1211 } | 1265 } |
1212 | 1266 |
1213 | 1267 |
1214 void Assembler::bgeuc(Register rs, Register rt, int16_t offset) { | 1268 void Assembler::bgeuc(Register rs, Register rt, int16_t offset) { |
1215 DCHECK(kArchVariant == kMips64r6); | 1269 DCHECK(kArchVariant == kMips64r6); |
1216 DCHECK(!(rs.is(zero_reg))); | 1270 DCHECK(!(rs.is(zero_reg))); |
1217 DCHECK(!(rt.is(zero_reg))); | 1271 DCHECK(!(rt.is(zero_reg))); |
1218 DCHECK(rs.code() != rt.code()); | 1272 DCHECK(rs.code() != rt.code()); |
1219 GenInstrImmediate(BLEZ, rs, rt, offset); | 1273 GenInstrImmediate(BLEZ, rs, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1220 } | 1274 } |
1221 | 1275 |
1222 | 1276 |
1223 void Assembler::bgec(Register rs, Register rt, int16_t offset) { | 1277 void Assembler::bgec(Register rs, Register rt, int16_t offset) { |
1224 DCHECK(kArchVariant == kMips64r6); | 1278 DCHECK(kArchVariant == kMips64r6); |
1225 DCHECK(!(rs.is(zero_reg))); | 1279 DCHECK(!(rs.is(zero_reg))); |
1226 DCHECK(!(rt.is(zero_reg))); | 1280 DCHECK(!(rt.is(zero_reg))); |
1227 DCHECK(rs.code() != rt.code()); | 1281 DCHECK(rs.code() != rt.code()); |
1228 GenInstrImmediate(BLEZL, rs, rt, offset); | 1282 GenInstrImmediate(BLEZL, rs, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1229 } | 1283 } |
1230 | 1284 |
1231 | 1285 |
1232 void Assembler::bgezal(Register rs, int16_t offset) { | 1286 void Assembler::bgezal(Register rs, int16_t offset) { |
1233 DCHECK(kArchVariant != kMips64r6 || rs.is(zero_reg)); | 1287 DCHECK(kArchVariant != kMips64r6 || rs.is(zero_reg)); |
1234 BlockTrampolinePoolScope block_trampoline_pool(this); | 1288 BlockTrampolinePoolScope block_trampoline_pool(this); |
1235 positions_recorder()->WriteRecordedPositions(); | 1289 positions_recorder()->WriteRecordedPositions(); |
1236 GenInstrImmediate(REGIMM, rs, BGEZAL, offset); | 1290 GenInstrImmediate(REGIMM, rs, BGEZAL, offset); |
1237 BlockTrampolinePoolFor(1); // For associated delay slot. | 1291 BlockTrampolinePoolFor(1); // For associated delay slot. |
1238 } | 1292 } |
1239 | 1293 |
1240 | 1294 |
1241 void Assembler::bgtz(Register rs, int16_t offset) { | 1295 void Assembler::bgtz(Register rs, int16_t offset) { |
1242 BlockTrampolinePoolScope block_trampoline_pool(this); | 1296 BlockTrampolinePoolScope block_trampoline_pool(this); |
1243 GenInstrImmediate(BGTZ, rs, zero_reg, offset); | 1297 GenInstrImmediate(BGTZ, rs, zero_reg, offset); |
1244 BlockTrampolinePoolFor(1); // For associated delay slot. | 1298 BlockTrampolinePoolFor(1); // For associated delay slot. |
1245 } | 1299 } |
1246 | 1300 |
1247 | 1301 |
1248 void Assembler::bgtzc(Register rt, int16_t offset) { | 1302 void Assembler::bgtzc(Register rt, int16_t offset) { |
1249 DCHECK(kArchVariant == kMips64r6); | 1303 DCHECK(kArchVariant == kMips64r6); |
1250 DCHECK(!(rt.is(zero_reg))); | 1304 DCHECK(!(rt.is(zero_reg))); |
1251 GenInstrImmediate(BGTZL, zero_reg, rt, offset); | 1305 GenInstrImmediate(BGTZL, zero_reg, rt, offset, |
| 1306 CompactBranchType::COMPACT_BRANCH); |
1252 } | 1307 } |
1253 | 1308 |
1254 | 1309 |
1255 void Assembler::blez(Register rs, int16_t offset) { | 1310 void Assembler::blez(Register rs, int16_t offset) { |
1256 BlockTrampolinePoolScope block_trampoline_pool(this); | 1311 BlockTrampolinePoolScope block_trampoline_pool(this); |
1257 GenInstrImmediate(BLEZ, rs, zero_reg, offset); | 1312 GenInstrImmediate(BLEZ, rs, zero_reg, offset); |
1258 BlockTrampolinePoolFor(1); // For associated delay slot. | 1313 BlockTrampolinePoolFor(1); // For associated delay slot. |
1259 } | 1314 } |
1260 | 1315 |
1261 | 1316 |
1262 void Assembler::blezc(Register rt, int16_t offset) { | 1317 void Assembler::blezc(Register rt, int16_t offset) { |
1263 DCHECK(kArchVariant == kMips64r6); | 1318 DCHECK(kArchVariant == kMips64r6); |
1264 DCHECK(!(rt.is(zero_reg))); | 1319 DCHECK(!(rt.is(zero_reg))); |
1265 GenInstrImmediate(BLEZL, zero_reg, rt, offset); | 1320 GenInstrImmediate(BLEZL, zero_reg, rt, offset, |
| 1321 CompactBranchType::COMPACT_BRANCH); |
1266 } | 1322 } |
1267 | 1323 |
1268 | 1324 |
1269 void Assembler::bltzc(Register rt, int16_t offset) { | 1325 void Assembler::bltzc(Register rt, int16_t offset) { |
1270 DCHECK(kArchVariant == kMips64r6); | 1326 DCHECK(kArchVariant == kMips64r6); |
1271 DCHECK(!(rt.is(zero_reg))); | 1327 DCHECK(!rt.is(zero_reg)); |
1272 GenInstrImmediate(BGTZL, rt, rt, offset); | 1328 GenInstrImmediate(BGTZL, rt, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1273 } | 1329 } |
1274 | 1330 |
1275 | 1331 |
1276 void Assembler::bltuc(Register rs, Register rt, int16_t offset) { | 1332 void Assembler::bltuc(Register rs, Register rt, int16_t offset) { |
1277 DCHECK(kArchVariant == kMips64r6); | 1333 DCHECK(kArchVariant == kMips64r6); |
1278 DCHECK(!(rs.is(zero_reg))); | 1334 DCHECK(!(rs.is(zero_reg))); |
1279 DCHECK(!(rt.is(zero_reg))); | 1335 DCHECK(!(rt.is(zero_reg))); |
1280 DCHECK(rs.code() != rt.code()); | 1336 DCHECK(rs.code() != rt.code()); |
1281 GenInstrImmediate(BGTZ, rs, rt, offset); | 1337 GenInstrImmediate(BGTZ, rs, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1282 } | 1338 } |
1283 | 1339 |
1284 | 1340 |
1285 void Assembler::bltc(Register rs, Register rt, int16_t offset) { | 1341 void Assembler::bltc(Register rs, Register rt, int16_t offset) { |
1286 DCHECK(kArchVariant == kMips64r6); | 1342 DCHECK(kArchVariant == kMips64r6); |
1287 DCHECK(!(rs.is(zero_reg))); | 1343 DCHECK(!rs.is(zero_reg)); |
1288 DCHECK(!(rt.is(zero_reg))); | 1344 DCHECK(!rt.is(zero_reg)); |
1289 DCHECK(rs.code() != rt.code()); | 1345 DCHECK(rs.code() != rt.code()); |
1290 GenInstrImmediate(BGTZL, rs, rt, offset); | 1346 GenInstrImmediate(BGTZL, rs, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1291 } | 1347 } |
1292 | 1348 |
1293 | 1349 |
1294 void Assembler::bltz(Register rs, int16_t offset) { | 1350 void Assembler::bltz(Register rs, int16_t offset) { |
1295 BlockTrampolinePoolScope block_trampoline_pool(this); | 1351 BlockTrampolinePoolScope block_trampoline_pool(this); |
1296 GenInstrImmediate(REGIMM, rs, BLTZ, offset); | 1352 GenInstrImmediate(REGIMM, rs, BLTZ, offset); |
1297 BlockTrampolinePoolFor(1); // For associated delay slot. | 1353 BlockTrampolinePoolFor(1); // For associated delay slot. |
1298 } | 1354 } |
1299 | 1355 |
1300 | 1356 |
(...skipping 10 matching lines...) |
1311 BlockTrampolinePoolScope block_trampoline_pool(this); | 1367 BlockTrampolinePoolScope block_trampoline_pool(this); |
1312 GenInstrImmediate(BNE, rs, rt, offset); | 1368 GenInstrImmediate(BNE, rs, rt, offset); |
1313 BlockTrampolinePoolFor(1); // For associated delay slot. | 1369 BlockTrampolinePoolFor(1); // For associated delay slot. |
1314 } | 1370 } |
1315 | 1371 |
1316 | 1372 |
1317 void Assembler::bovc(Register rs, Register rt, int16_t offset) { | 1373 void Assembler::bovc(Register rs, Register rt, int16_t offset) { |
1318 DCHECK(kArchVariant == kMips64r6); | 1374 DCHECK(kArchVariant == kMips64r6); |
1319 DCHECK(!(rs.is(zero_reg))); | 1375 DCHECK(!(rs.is(zero_reg))); |
1320 DCHECK(rs.code() >= rt.code()); | 1376 DCHECK(rs.code() >= rt.code()); |
1321 GenInstrImmediate(ADDI, rs, rt, offset); | 1377 GenInstrImmediate(ADDI, rs, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1322 } | 1378 } |
1323 | 1379 |
1324 | 1380 |
1325 void Assembler::bnvc(Register rs, Register rt, int16_t offset) { | 1381 void Assembler::bnvc(Register rs, Register rt, int16_t offset) { |
1326 DCHECK(kArchVariant == kMips64r6); | 1382 DCHECK(kArchVariant == kMips64r6); |
1327 DCHECK(!(rs.is(zero_reg))); | 1383 DCHECK(!(rs.is(zero_reg))); |
1328 DCHECK(rs.code() >= rt.code()); | 1384 DCHECK(rs.code() >= rt.code()); |
1329 GenInstrImmediate(DADDI, rs, rt, offset); | 1385 GenInstrImmediate(DADDI, rs, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1330 } | 1386 } |
1331 | 1387 |
1332 | 1388 |
1333 void Assembler::blezalc(Register rt, int16_t offset) { | 1389 void Assembler::blezalc(Register rt, int16_t offset) { |
1334 DCHECK(kArchVariant == kMips64r6); | 1390 DCHECK(kArchVariant == kMips64r6); |
1335 DCHECK(!(rt.is(zero_reg))); | 1391 DCHECK(!(rt.is(zero_reg))); |
1336 GenInstrImmediate(BLEZ, zero_reg, rt, offset); | 1392 positions_recorder()->WriteRecordedPositions(); |
| 1393 GenInstrImmediate(BLEZ, zero_reg, rt, offset, |
| 1394 CompactBranchType::COMPACT_BRANCH); |
1337 } | 1395 } |
1338 | 1396 |
1339 | 1397 |
1340 void Assembler::bgezalc(Register rt, int16_t offset) { | 1398 void Assembler::bgezalc(Register rt, int16_t offset) { |
1341 DCHECK(kArchVariant == kMips64r6); | 1399 DCHECK(kArchVariant == kMips64r6); |
1342 DCHECK(!(rt.is(zero_reg))); | 1400 DCHECK(!(rt.is(zero_reg))); |
1343 GenInstrImmediate(BLEZ, rt, rt, offset); | 1401 positions_recorder()->WriteRecordedPositions(); |
| 1402 GenInstrImmediate(BLEZ, rt, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1344 } | 1403 } |
1345 | 1404 |
1346 | 1405 |
1347 void Assembler::bgezall(Register rs, int16_t offset) { | 1406 void Assembler::bgezall(Register rs, int16_t offset) { |
1348 DCHECK(kArchVariant != kMips64r6); | 1407 DCHECK(kArchVariant != kMips64r6); |
1349 DCHECK(!(rs.is(zero_reg))); | 1408 DCHECK(!(rs.is(zero_reg))); |
1350 BlockTrampolinePoolScope block_trampoline_pool(this); | 1409 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1410 positions_recorder()->WriteRecordedPositions(); |
1351 GenInstrImmediate(REGIMM, rs, BGEZALL, offset); | 1411 GenInstrImmediate(REGIMM, rs, BGEZALL, offset); |
1352 BlockTrampolinePoolFor(1); // For associated delay slot. | 1412 BlockTrampolinePoolFor(1); // For associated delay slot. |
1353 } | 1413 } |
1354 | 1414 |
1355 | 1415 |
1356 void Assembler::bltzalc(Register rt, int16_t offset) { | 1416 void Assembler::bltzalc(Register rt, int16_t offset) { |
1357 DCHECK(kArchVariant == kMips64r6); | 1417 DCHECK(kArchVariant == kMips64r6); |
1358 DCHECK(!(rt.is(zero_reg))); | 1418 DCHECK(!(rt.is(zero_reg))); |
1359 GenInstrImmediate(BGTZ, rt, rt, offset); | 1419 positions_recorder()->WriteRecordedPositions(); |
| 1420 GenInstrImmediate(BGTZ, rt, rt, offset, CompactBranchType::COMPACT_BRANCH); |
1360 } | 1421 } |
1361 | 1422 |
1362 | 1423 |
1363 void Assembler::bgtzalc(Register rt, int16_t offset) { | 1424 void Assembler::bgtzalc(Register rt, int16_t offset) { |
1364 DCHECK(kArchVariant == kMips64r6); | 1425 DCHECK(kArchVariant == kMips64r6); |
1365 DCHECK(!(rt.is(zero_reg))); | 1426 DCHECK(!(rt.is(zero_reg))); |
1366 GenInstrImmediate(BGTZ, zero_reg, rt, offset); | 1427 positions_recorder()->WriteRecordedPositions(); |
| 1428 GenInstrImmediate(BGTZ, zero_reg, rt, offset, |
| 1429 CompactBranchType::COMPACT_BRANCH); |
1367 } | 1430 } |
1368 | 1431 |
1369 | 1432 |
1370 void Assembler::beqzalc(Register rt, int16_t offset) { | 1433 void Assembler::beqzalc(Register rt, int16_t offset) { |
1371 DCHECK(kArchVariant == kMips64r6); | 1434 DCHECK(kArchVariant == kMips64r6); |
1372 DCHECK(!(rt.is(zero_reg))); | 1435 DCHECK(!(rt.is(zero_reg))); |
1373 GenInstrImmediate(ADDI, zero_reg, rt, offset); | 1436 positions_recorder()->WriteRecordedPositions(); |
| 1437 GenInstrImmediate(ADDI, zero_reg, rt, offset, |
| 1438 CompactBranchType::COMPACT_BRANCH); |
1374 } | 1439 } |
1375 | 1440 |
1376 | 1441 |
1377 void Assembler::bnezalc(Register rt, int16_t offset) { | 1442 void Assembler::bnezalc(Register rt, int16_t offset) { |
1378 DCHECK(kArchVariant == kMips64r6); | 1443 DCHECK(kArchVariant == kMips64r6); |
1379 DCHECK(!(rt.is(zero_reg))); | 1444 DCHECK(!(rt.is(zero_reg))); |
1380 GenInstrImmediate(DADDI, zero_reg, rt, offset); | 1445 positions_recorder()->WriteRecordedPositions(); |
| 1446 GenInstrImmediate(DADDI, zero_reg, rt, offset, |
| 1447 CompactBranchType::COMPACT_BRANCH); |
1381 } | 1448 } |
1382 | 1449 |
1383 | 1450 |
1384 void Assembler::beqc(Register rs, Register rt, int16_t offset) { | 1451 void Assembler::beqc(Register rs, Register rt, int16_t offset) { |
1385 DCHECK(kArchVariant == kMips64r6); | 1452 DCHECK(kArchVariant == kMips64r6); |
1386 DCHECK(rs.code() < rt.code()); | 1453 DCHECK(rs.code() != rt.code() && rs.code() != 0 && rt.code() != 0); |
1387 GenInstrImmediate(ADDI, rs, rt, offset); | 1454 if (rs.code() < rt.code()) { |
| 1455 GenInstrImmediate(ADDI, rs, rt, offset, CompactBranchType::COMPACT_BRANCH); |
| 1456 } else { |
| 1457 GenInstrImmediate(ADDI, rt, rs, offset, CompactBranchType::COMPACT_BRANCH); |
| 1458 } |
1388 } | 1459 } |
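Note: beqc()/bnec() now canonicalize operand order instead of asserting it. BEQC/BNEC compare their operands symmetrically, and keeping rs < rt is what lets IsBeqc()/IsBnec() above recognize the emitted instruction (POP10/POP30 with 0 != rs < rt), so for example:

    beqc(a1, a0, offset);  // emits the same instruction as beqc(a0, a1, offset)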
1389 | 1460 |
1390 | 1461 |
1391 void Assembler::beqzc(Register rs, int32_t offset) { | 1462 void Assembler::beqzc(Register rs, int32_t offset) { |
1392 DCHECK(kArchVariant == kMips64r6); | 1463 DCHECK(kArchVariant == kMips64r6); |
1393 DCHECK(!(rs.is(zero_reg))); | 1464 DCHECK(!(rs.is(zero_reg))); |
1394 Instr instr = POP66 | (rs.code() << kRsShift) | (offset & kImm21Mask); | 1465 GenInstrImmediate(POP66, rs, offset, CompactBranchType::COMPACT_BRANCH); |
1395 emit(instr); | |
1396 } | 1466 } |
1397 | 1467 |
1398 | 1468 |
1399 void Assembler::bnec(Register rs, Register rt, int16_t offset) { | 1469 void Assembler::bnec(Register rs, Register rt, int16_t offset) { |
1400 DCHECK(kArchVariant == kMips64r6); | 1470 DCHECK(kArchVariant == kMips64r6); |
1401 DCHECK(rs.code() < rt.code()); | 1471 DCHECK(rs.code() != rt.code() && rs.code() != 0 && rt.code() != 0); |
1402 GenInstrImmediate(DADDI, rs, rt, offset); | 1472 if (rs.code() < rt.code()) { |
| 1473 GenInstrImmediate(DADDI, rs, rt, offset, CompactBranchType::COMPACT_BRANCH); |
| 1474 } else { |
| 1475 GenInstrImmediate(DADDI, rt, rs, offset, CompactBranchType::COMPACT_BRANCH); |
| 1476 } |
1403 } | 1477 } |
1404 | 1478 |
1405 | 1479 |
1406 void Assembler::bnezc(Register rs, int32_t offset) { | 1480 void Assembler::bnezc(Register rs, int32_t offset) { |
1407 DCHECK(kArchVariant == kMips64r6); | 1481 DCHECK(kArchVariant == kMips64r6); |
1408 DCHECK(!(rs.is(zero_reg))); | 1482 DCHECK(!(rs.is(zero_reg))); |
1409 Instr instr = POP76 | (rs.code() << kRsShift) | offset; | 1483 GenInstrImmediate(POP76, rs, offset, CompactBranchType::COMPACT_BRANCH); |
1410 emit(instr); | |
1411 } | 1484 } |
1412 | 1485 |
1413 | 1486 |
1414 void Assembler::j(int64_t target) { | 1487 void Assembler::j(int64_t target) { |
1415 BlockTrampolinePoolScope block_trampoline_pool(this); | 1488 BlockTrampolinePoolScope block_trampoline_pool(this); |
1416 GenInstrJump(J, static_cast<uint32_t>(target >> 2) & kImm26Mask); | 1489 GenInstrJump(J, static_cast<uint32_t>(target >> 2) & kImm26Mask); |
1417 BlockTrampolinePoolFor(1); // For associated delay slot. | 1490 BlockTrampolinePoolFor(1); // For associated delay slot. |
1418 } | 1491 } |
1419 | 1492 |
1420 | 1493 |
(...skipping 50 matching lines...) |
1471 DCHECK(rs.code() != rd.code()); | 1544 DCHECK(rs.code() != rd.code()); |
1472 BlockTrampolinePoolScope block_trampoline_pool(this); | 1545 BlockTrampolinePoolScope block_trampoline_pool(this); |
1473 positions_recorder()->WriteRecordedPositions(); | 1546 positions_recorder()->WriteRecordedPositions(); |
1474 GenInstrRegister(SPECIAL, rs, zero_reg, rd, 0, JALR); | 1547 GenInstrRegister(SPECIAL, rs, zero_reg, rd, 0, JALR); |
1475 BlockTrampolinePoolFor(1); // For associated delay slot. | 1548 BlockTrampolinePoolFor(1); // For associated delay slot. |
1476 } | 1549 } |
1477 | 1550 |
1478 | 1551 |
1479 void Assembler::jic(Register rt, int16_t offset) { | 1552 void Assembler::jic(Register rt, int16_t offset) { |
1480 DCHECK(kArchVariant == kMips64r6); | 1553 DCHECK(kArchVariant == kMips64r6); |
1481 Instr instr = POP66 | (JIC << kRsShift) | (rt.code() << kRtShift) | | 1554 GenInstrImmediate(POP66, zero_reg, rt, offset); |
1482 (offset & kImm16Mask); | |
1483 emit(instr); | |
1484 } | 1555 } |
1485 | 1556 |
1486 | 1557 |
1487 void Assembler::jialc(Register rt, int16_t offset) { | 1558 void Assembler::jialc(Register rt, int16_t offset) { |
1488 DCHECK(kArchVariant == kMips64r6); | 1559 DCHECK(kArchVariant == kMips64r6); |
1489 positions_recorder()->WriteRecordedPositions(); | 1560 positions_recorder()->WriteRecordedPositions(); |
1490 GenInstrImmediate(POP76, zero_reg, rt, offset); | 1561 GenInstrImmediate(POP76, zero_reg, rt, offset); |
1491 } | 1562 } |
1492 | 1563 |
1493 | 1564 |
(...skipping 546 matching lines...) |
2040 GenInstrImmediate(SD, at, rd, 0); // Equiv to sw(rd, MemOperand(at, 0)); | 2111 GenInstrImmediate(SD, at, rd, 0); // Equiv to sw(rd, MemOperand(at, 0)); |
2041 } | 2112 } |
2042 } | 2113 } |
2043 | 2114 |
2044 | 2115 |
2045 // ---------PC-Relative instructions----------- | 2116 // ---------PC-Relative instructions----------- |
2046 | 2117 |
2047 void Assembler::addiupc(Register rs, int32_t imm19) { | 2118 void Assembler::addiupc(Register rs, int32_t imm19) { |
2048 DCHECK(kArchVariant == kMips64r6); | 2119 DCHECK(kArchVariant == kMips64r6); |
2049 DCHECK(rs.is_valid() && is_int19(imm19)); | 2120 DCHECK(rs.is_valid() && is_int19(imm19)); |
2050 int32_t imm21 = ADDIUPC << kImm19Bits | (imm19 & kImm19Mask); | 2121 uint32_t imm21 = ADDIUPC << kImm19Bits | (imm19 & kImm19Mask); |
2051 GenInstrImmediate(PCREL, rs, imm21); | 2122 GenInstrImmediate(PCREL, rs, imm21); |
2052 } | 2123 } |
2053 | 2124 |
2054 | 2125 |
2055 void Assembler::lwpc(Register rs, int32_t offset19) { | 2126 void Assembler::lwpc(Register rs, int32_t offset19) { |
2056 DCHECK(kArchVariant == kMips64r6); | 2127 DCHECK(kArchVariant == kMips64r6); |
2057 DCHECK(rs.is_valid() && is_int19(offset19)); | 2128 DCHECK(rs.is_valid() && is_int19(offset19)); |
2058 int32_t imm21 = LWPC << kImm19Bits | (offset19 & kImm19Mask); | 2129 uint32_t imm21 = LWPC << kImm19Bits | (offset19 & kImm19Mask); |
2059 GenInstrImmediate(PCREL, rs, imm21); | 2130 GenInstrImmediate(PCREL, rs, imm21); |
2060 } | 2131 } |
2061 | 2132 |
2062 | 2133 |
2063 void Assembler::lwupc(Register rs, int32_t offset19) { | 2134 void Assembler::lwupc(Register rs, int32_t offset19) { |
2064 DCHECK(kArchVariant == kMips64r6); | 2135 DCHECK(kArchVariant == kMips64r6); |
2065 DCHECK(rs.is_valid() && is_int19(offset19)); | 2136 DCHECK(rs.is_valid() && is_int19(offset19)); |
2066 int32_t imm21 = LWUPC << kImm19Bits | (offset19 & kImm19Mask); | 2137 uint32_t imm21 = LWUPC << kImm19Bits | (offset19 & kImm19Mask); |
2067 GenInstrImmediate(PCREL, rs, imm21); | 2138 GenInstrImmediate(PCREL, rs, imm21); |
2068 } | 2139 } |
2069 | 2140 |
2070 | 2141 |
2071 void Assembler::ldpc(Register rs, int32_t offset18) { | 2142 void Assembler::ldpc(Register rs, int32_t offset18) { |
2072 DCHECK(kArchVariant == kMips64r6); | 2143 DCHECK(kArchVariant == kMips64r6); |
2073 DCHECK(rs.is_valid() && is_int18(offset18)); | 2144 DCHECK(rs.is_valid() && is_int18(offset18)); |
2074 int32_t imm21 = LDPC << kImm18Bits | (offset18 & kImm18Mask); | 2145 uint32_t imm21 = LDPC << kImm18Bits | (offset18 & kImm18Mask); |
2075 GenInstrImmediate(PCREL, rs, imm21); | 2146 GenInstrImmediate(PCREL, rs, imm21); |
2076 } | 2147 } |
2077 | 2148 |
2078 | 2149 |
2079 void Assembler::auipc(Register rs, int16_t imm16) { | 2150 void Assembler::auipc(Register rs, int16_t imm16) { |
2080 DCHECK(kArchVariant == kMips64r6); | 2151 DCHECK(kArchVariant == kMips64r6); |
2081 DCHECK(rs.is_valid() && is_int16(imm16)); | 2152 DCHECK(rs.is_valid()); |
2082 int32_t imm21 = AUIPC << kImm16Bits | (imm16 & kImm16Mask); | 2153 uint32_t imm21 = AUIPC << kImm16Bits | (imm16 & kImm16Mask); |
2083 GenInstrImmediate(PCREL, rs, imm21); | 2154 GenInstrImmediate(PCREL, rs, imm21); |
2084 } | 2155 } |
2085 | 2156 |
2086 | 2157 |
2087 void Assembler::aluipc(Register rs, int16_t imm16) { | 2158 void Assembler::aluipc(Register rs, int16_t imm16) { |
2088 DCHECK(kArchVariant == kMips64r6); | 2159 DCHECK(kArchVariant == kMips64r6); |
2089 DCHECK(rs.is_valid() && is_int16(imm16)); | 2160 DCHECK(rs.is_valid()); |
2090 int32_t imm21 = ALUIPC << kImm16Bits | (imm16 & kImm16Mask); | 2161 uint32_t imm21 = ALUIPC << kImm16Bits | (imm16 & kImm16Mask); |
2091 GenInstrImmediate(PCREL, rs, imm21); | 2162 GenInstrImmediate(PCREL, rs, imm21); |
2092 } | 2163 } |
2093 | 2164 |
2094 | 2165 |
2095 // -------------Misc-instructions-------------- | 2166 // -------------Misc-instructions-------------- |
2096 | 2167 |
2097 // Break / Trap instructions. | 2168 // Break / Trap instructions. |
2098 void Assembler::break_(uint32_t code, bool break_as_stop) { | 2169 void Assembler::break_(uint32_t code, bool break_as_stop) { |
2099 DCHECK((code & ~0xfffff) == 0); | 2170 DCHECK((code & ~0xfffff) == 0); |
2100 // We need to invalidate breaks that could be stops as well because the | 2171 // We need to invalidate breaks that could be stops as well because the |
(...skipping 859 matching lines...)
2960 } | 3031 } |
2961 | 3032 |
2962 | 3033 |
2963 void Assembler::bc1t(int16_t offset, uint16_t cc) { | 3034 void Assembler::bc1t(int16_t offset, uint16_t cc) { |
2964 DCHECK(is_uint3(cc)); | 3035 DCHECK(is_uint3(cc)); |
2965 Instr instr = COP1 | BC1 | cc << 18 | 1 << 16 | (offset & kImm16Mask); | 3036 Instr instr = COP1 | BC1 | cc << 18 | 1 << 16 | (offset & kImm16Mask); |
2966 emit(instr); | 3037 emit(instr); |
2967 } | 3038 } |
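Note: bc1t (and its bc1f counterpart) still builds the word by hand: the COP1 major opcode, BC1 in the rs slot, the 3-bit FP condition-code field at bit 18, a true/false select bit at bit 16, and a signed 16-bit word offset. A standalone sketch under those assumptions (constants are symbolic, not V8's):

#include <cstdint>

uint32_t EncodeFpCompareBranch(uint32_t cop1, uint32_t bc1, uint32_t cc,
                               bool branch_if_true, int16_t offset) {
  return cop1 | bc1 | (cc << 18) | ((branch_if_true ? 1u : 0u) << 16) |
         (static_cast<uint32_t>(static_cast<uint16_t>(offset)) & 0xffffu);
}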
2968 | 3039 |
2969 | 3040 |
2970 // Debugging. | |
2971 int Assembler::RelocateInternalReference(RelocInfo::Mode rmode, byte* pc, | 3041 int Assembler::RelocateInternalReference(RelocInfo::Mode rmode, byte* pc, |
2972 intptr_t pc_delta) { | 3042 intptr_t pc_delta) { |
2973 if (RelocInfo::IsInternalReference(rmode)) { | 3043 if (RelocInfo::IsInternalReference(rmode)) { |
2974 int64_t* p = reinterpret_cast<int64_t*>(pc); | 3044 int64_t* p = reinterpret_cast<int64_t*>(pc); |
2975 if (*p == kEndOfJumpChain) { | 3045 if (*p == kEndOfJumpChain) { |
2976 return 0; // Number of instructions patched. | 3046 return 0; // Number of instructions patched. |
2977 } | 3047 } |
2978 *p += pc_delta; | 3048 *p += pc_delta; |
2979 return 2; // Number of instructions patched. | 3049 return 2; // Number of instructions patched. |
2980 } | 3050 } |
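Note: for the plain INTERNAL_REFERENCE case above, relocation just slides an absolute 64-bit address by the distance the buffer moved, unless the slot still holds the end-of-chain sentinel. A self-contained sketch of that fixup (sentinel value and return convention mirror the code above; this is illustrative, not V8's helper):

#include <cstdint>
#include <cstring>

constexpr int64_t kEndOfJumpChainSketch = 0;  // sentinel for an unlinked slot

// Returns the number of 32-bit instruction slots rewritten (0 or 2).
int PatchAbsoluteReference(uint8_t* pc, int64_t pc_delta) {
  int64_t value;
  std::memcpy(&value, pc, sizeof(value));        // slot may be unaligned
  if (value == kEndOfJumpChainSketch) return 0;  // nothing to patch yet
  value += pc_delta;
  std::memcpy(pc, &value, sizeof(value));
  return 2;  // a 64-bit reference occupies two instruction words
}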
(...skipping 33 matching lines...)
3014 // Regular j/jal relocation. | 3084 // Regular j/jal relocation. |
3015 uint32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; | 3085 uint32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; |
3016 imm28 += pc_delta; | 3086 imm28 += pc_delta; |
3017 imm28 &= kImm28Mask; | 3087 imm28 &= kImm28Mask; |
3018 instr &= ~kImm26Mask; | 3088 instr &= ~kImm26Mask; |
3019 DCHECK((imm28 & 3) == 0); | 3089 DCHECK((imm28 & 3) == 0); |
3020 uint32_t imm26 = static_cast<uint32_t>(imm28 >> 2); | 3090 uint32_t imm26 = static_cast<uint32_t>(imm28 >> 2); |
3021 instr_at_put(pc, instr | (imm26 & kImm26Mask)); | 3091 instr_at_put(pc, instr | (imm26 & kImm26Mask)); |
3022 return 1; // Number of instructions patched. | 3092 return 1; // Number of instructions patched. |
3023 } else { | 3093 } else { |
| 3094 DCHECK(((instr & kJumpRawMask) == kJRawMark) || |
| 3095 ((instr & kJumpRawMask) == kJalRawMark)); |
3024 // Unbox raw offset and emit j/jal. | 3096 // Unbox raw offset and emit j/jal. |
3025 int32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; | 3097 int32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; |
3026 // Sign extend 28-bit offset to 32-bit. | 3098 // Sign extend 28-bit offset to 32-bit. |
3027 imm28 = (imm28 << 4) >> 4; | 3099 imm28 = (imm28 << 4) >> 4; |
3028 uint64_t target = | 3100 uint64_t target = |
3029 static_cast<int64_t>(imm28) + reinterpret_cast<uint64_t>(pc); | 3101 static_cast<int64_t>(imm28) + reinterpret_cast<uint64_t>(pc); |
3030 target &= kImm28Mask; | 3102 target &= kImm28Mask; |
3031 DCHECK((imm28 & 3) == 0); | 3103 DCHECK((imm28 & 3) == 0); |
3032 uint32_t imm26 = static_cast<uint32_t>(target >> 2); | 3104 uint32_t imm26 = static_cast<uint32_t>(target >> 2); |
3033 // Check markings whether to emit j or jal. | 3105 // Check markings whether to emit j or jal. |
(...skipping 149 matching lines...)
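Note: the else branch above unboxes a raw, not-yet-linked j/jal: the 26-bit field carries (offset >> 2), which is widened to 28 bits, sign-extended, added to the instruction's own address, and clipped to the 256 MB region before being re-packed. A sketch of that arithmetic (mask constants written out; the kJRawMark/kJalRawMark opcode selection is omitted):

#include <cstdint>

constexpr uint32_t kImm26MaskSketch = (1u << 26) - 1;
constexpr uint64_t kImm28MaskSketch = (1u << 28) - 1;

// Rebuilds the 26-bit jump index for a raw (boxed) j/jal located at instr_address.
uint32_t UnboxJumpIndex(uint32_t instr, uint64_t instr_address) {
  int32_t imm28 = static_cast<int32_t>((instr & kImm26MaskSketch) << 2);
  if (imm28 & (1 << 27)) {
    imm28 |= ~static_cast<int32_t>(kImm28MaskSketch);  // sign-extend 28 bits
  }
  uint64_t target = instr_address + static_cast<int64_t>(imm28);
  target &= kImm28MaskSketch;  // j/jal can only address the current 256 MB region
  return static_cast<uint32_t>(target >> 2) & kImm26MaskSketch;
}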
3183 } | 3255 } |
3184 return; | 3256 return; |
3185 } | 3257 } |
3186 | 3258 |
3187 DCHECK(!trampoline_emitted_); | 3259 DCHECK(!trampoline_emitted_); |
3188 DCHECK(unbound_labels_count_ >= 0); | 3260 DCHECK(unbound_labels_count_ >= 0); |
3189 if (unbound_labels_count_ > 0) { | 3261 if (unbound_labels_count_ > 0) { |
3190 // First we emit jump (2 instructions), then we emit trampoline pool. | 3262 // First we emit jump (2 instructions), then we emit trampoline pool. |
3191 { BlockTrampolinePoolScope block_trampoline_pool(this); | 3263 { BlockTrampolinePoolScope block_trampoline_pool(this); |
3192 Label after_pool; | 3264 Label after_pool; |
3193 b(&after_pool); | 3265 if (kArchVariant == kMips64r6) { |
3194 nop(); | 3266 bc(&after_pool); |
| 3267 } else { |
| 3268 b(&after_pool); |
| 3269 nop(); |
| 3270 } |
3195 | 3271 |
| 3272 EmitForbiddenSlotInstruction(); |
3196 int pool_start = pc_offset(); | 3273 int pool_start = pc_offset(); |
3197 for (int i = 0; i < unbound_labels_count_; i++) { | 3274 for (int i = 0; i < unbound_labels_count_; i++) { |
3198 { BlockGrowBufferScope block_buf_growth(this); | 3275 { BlockGrowBufferScope block_buf_growth(this); |
3199 // Buffer growth (and relocation) must be blocked for internal | 3276 // Buffer growth (and relocation) must be blocked for internal |
3200 // references until associated instructions are emitted and available | 3277 // references until associated instructions are emitted and available |
3201 // to be patched. | 3278 // to be patched. |
3202 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 3279 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
3203 j(&after_pool); | 3280 j(&after_pool); |
3204 } | 3281 } |
3205 nop(); | 3282 nop(); |
(...skipping 97 matching lines...)
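Note: in the trampoline-pool emission above, r6 replaces the pre-r6 "b + delay-slot nop" pair with a single compact bc, and a forbidden-slot filler is emitted before the pool so the compact branch is never immediately followed by another branch. A rough, illustrative size estimate under those assumptions (two words per unbound label, as in the loop above; the optional forbidden-slot filler ignored; not the accounting V8 itself uses):

constexpr int kInstrSizeBytesSketch = 4;

int TrampolinePoolBytes(int unbound_labels, bool is_r6) {
  int branch_over = is_r6 ? 1 : 2;  // bc vs. b + delay-slot nop
  int per_label = 2;                // j + nop slot per unbound label
  return (branch_over + per_label * unbound_labels) * kInstrSizeBytesSketch;
}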
3303 if (icache_flush_mode != SKIP_ICACHE_FLUSH) { | 3380 if (icache_flush_mode != SKIP_ICACHE_FLUSH) { |
3304 Assembler::FlushICache(isolate, pc, 4 * Assembler::kInstrSize); | 3381 Assembler::FlushICache(isolate, pc, 4 * Assembler::kInstrSize); |
3305 } | 3382 } |
3306 } | 3383 } |
3307 | 3384 |
3308 | 3385 |
3309 } // namespace internal | 3386 } // namespace internal |
3310 } // namespace v8 | 3387 } // namespace v8 |
3311 | 3388 |
3312 #endif // V8_TARGET_ARCH_MIPS64 | 3389 #endif // V8_TARGET_ARCH_MIPS64 |