| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
| 6 // met: | 6 // met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 393 matching lines...) |
| 404 // | 404 // |
| 405 // Label L; // unbound label | 405 // Label L; // unbound label |
| 406 // j(cc, &L); // forward branch to unbound label | 406 // j(cc, &L); // forward branch to unbound label |
| 407 // bind(&L); // bind label to the current pc | 407 // bind(&L); // bind label to the current pc |
| 408 // j(cc, &L); // backward branch to bound label | 408 // j(cc, &L); // backward branch to bound label |
| 409 // bind(&L); // illegal: a label may be bound only once | 409 // bind(&L); // illegal: a label may be bound only once |
| 410 // | 410 // |
| 411 // Note: The same Label can be used for forward and backward branches | 411 // Note: The same Label can be used for forward and backward branches |
| 412 // but it may be bound only once. | 412 // but it may be bound only once. |
| 413 void bind(Label* L); // Binds an unbound label L to current code position. | 413 void bind(Label* L); // Binds an unbound label L to current code position. |
| 414 |
| 415 enum OffsetSize : int { kOffset26 = 26, kOffset21 = 21, kOffset16 = 16 }; |
| 416 |
| 414 // Determines if Label is bound and near enough so that branch instruction | 417 // Determines if Label is bound and near enough so that branch instruction |
| 415 // can be used to reach it, instead of jump instruction. | 418 // can be used to reach it, instead of jump instruction. |
| 416 bool is_near(Label* L); | 419 bool is_near(Label* L); |
| 420 bool is_near(Label* L, OffsetSize bits); |
| 421 bool is_near_branch(Label* L); |
| 422 inline bool is_near_pre_r6(Label* L) { |
| 423 DCHECK(!(kArchVariant == kMips64r6)); |
| 424 return pc_offset() - L->pos() < kMaxBranchOffset - 4 * kInstrSize; |
| 425 } |
| 426 inline bool is_near_r6(Label* L) { |
| 427 DCHECK(kArchVariant == kMips64r6); |
| 428 return pc_offset() - L->pos() < kMaxCompactBranchOffset - 4 * kInstrSize; |
| 429 } |
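Side note on the new near-checks: they apply to labels that are already bound, and they compare the backward distance against the reachable branch range minus a margin of four instructions. A standalone restatement of that test (my sketch, not code from this CL; the constant values are the ones declared near the bottom of this header):

    constexpr int kInstrSize = 4;
    constexpr int kMaxBranchOffset = (1 << (18 - 1)) - 1;         // ~128 KB: 16-bit field, scaled by 4
    constexpr int kMaxCompactBranchOffset = (1 << (28 - 1)) - 1;  // ~128 MB: 26-bit field, scaled by 4

    // Mirrors is_near_pre_r6 / is_near_r6: distance from the bound label back
    // to the current pc, with four instructions of slack held in reserve.
    bool IsNearBound(int pc_offset, int label_pos, bool r6) {
      const int max = r6 ? kMaxCompactBranchOffset : kMaxBranchOffset;
      return pc_offset - label_pos < max - 4 * kInstrSize;
    }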
| 430 |
| 431 int BranchOffset(Instr instr); |
| 417 | 432 |
| 418 // Returns the branch offset to the given label from the current code | 433 // Returns the branch offset to the given label from the current code |
| 419 // position. Links the label to the current position if it is still unbound. | 434 // position. Links the label to the current position if it is still unbound. |
| 420 // Manages the jump elimination optimization if the second parameter is true. | 435 // The second parameter selects the bit width of the encoded offset field. |
| 421 int32_t branch_offset(Label* L, bool jump_elimination_allowed); | 436 int32_t branch_offset_helper(Label* L, OffsetSize bits); |
| 422 int32_t branch_offset_compact(Label* L, bool jump_elimination_allowed); | 437 inline int32_t branch_offset(Label* L) { |
| 423 int32_t branch_offset21(Label* L, bool jump_elimination_allowed); | 438 return branch_offset_helper(L, OffsetSize::kOffset16); |
| 424 int32_t branch_offset21_compact(Label* L, bool jump_elimination_allowed); | |
| 425 int32_t shifted_branch_offset(Label* L, bool jump_elimination_allowed) { | |
| 426 int32_t o = branch_offset(L, jump_elimination_allowed); | |
| 427 DCHECK((o & 3) == 0); // Assert the offset is aligned. | |
| 428 return o >> 2; | |
| 429 } | 439 } |
| 430 int32_t shifted_branch_offset_compact(Label* L, | 440 inline int32_t branch_offset21(Label* L) { |
| 431 bool jump_elimination_allowed) { | 441 return branch_offset_helper(L, OffsetSize::kOffset21); |
| 432 int32_t o = branch_offset_compact(L, jump_elimination_allowed); | 442 } |
| 433 DCHECK((o & 3) == 0); // Assert the offset is aligned. | 443 inline int32_t branch_offset26(Label* L) { |
| 434 return o >> 2; | 444 return branch_offset_helper(L, OffsetSize::kOffset26); |
| 445 } |
| 446 inline int32_t shifted_branch_offset(Label* L) { |
| 447 return branch_offset(L) >> 2; |
| 448 } |
| 449 inline int32_t shifted_branch_offset21(Label* L) { |
| 450 return branch_offset21(L) >> 2; |
| 451 } |
| 452 inline int32_t shifted_branch_offset26(Label* L) { |
| 453 return branch_offset26(L) >> 2; |
| 435 } | 454 } |
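To make the shift explicit: branch targets are 4-byte aligned, so the value placed in an instruction's offset field is the signed byte offset divided by 4, and OffsetSize only says how many bits that field has. A small self-contained illustration (mine, not part of the CL):

    #include <cassert>

    // True if 'value' fits a signed two's-complement field of 'bits' bits,
    // i.e. what OffsetSize::kOffset16 / kOffset21 / kOffset26 allow.
    bool FitsSigned(int value, int bits) {
      const int limit = 1 << (bits - 1);
      return value >= -limit && value < limit;
    }

    int main() {
      int byte_offset = -64;           // e.g. a branch 16 instructions backwards
      assert((byte_offset & 3) == 0);  // targets are instruction-aligned
      int field = byte_offset >> 2;    // what shifted_branch_offset() returns
      assert(FitsSigned(field, 16));   // fits an ordinary branch (kOffset16)
      assert(FitsSigned(field, 26));   // also fits a compact branch (kOffset26)
      return 0;
    }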
| 436 uint64_t jump_address(Label* L); | 455 uint64_t jump_address(Label* L); |
| 437 uint64_t jump_offset(Label* L); | 456 uint64_t jump_offset(Label* L); |
| 438 | 457 |
| 439 // Puts a label's target address at the given position. | 458 // Puts a label's target address at the given position. |
| 440 // The high 8 bits are set to zero. | 459 // The high 8 bits are set to zero. |
| 441 void label_at_put(Label* L, int at_offset); | 460 void label_at_put(Label* L, int at_offset); |
| 442 | 461 |
| 443 // Read/Modify the code target address in the branch/call instruction at pc. | 462 // Read/Modify the code target address in the branch/call instruction at pc. |
| 444 static Address target_address_at(Address pc); | 463 static Address target_address_at(Address pc); |
| (...skipping 120 matching lines...) |
| 565 void nop(unsigned int type = 0) { | 584 void nop(unsigned int type = 0) { |
| 566 DCHECK(type < 32); | 585 DCHECK(type < 32); |
| 567 Register nop_rt_reg = (type == 0) ? zero_reg : at; | 586 Register nop_rt_reg = (type == 0) ? zero_reg : at; |
| 568 sll(zero_reg, nop_rt_reg, type, true); | 587 sll(zero_reg, nop_rt_reg, type, true); |
| 569 } | 588 } |
| 570 | 589 |
| 571 | 590 |
| 572 // --------Branch-and-jump-instructions---------- | 591 // --------Branch-and-jump-instructions---------- |
| 573 // We don't use likely variant of instructions. | 592 // We don't use likely variant of instructions. |
| 574 void b(int16_t offset); | 593 void b(int16_t offset); |
| 575 void b(Label* L) { b(branch_offset(L, false)>>2); } | 594 inline void b(Label* L) { b(shifted_branch_offset(L)); } |
| 576 void bal(int16_t offset); | 595 void bal(int16_t offset); |
| 577 void bal(Label* L) { bal(branch_offset(L, false)>>2); } | 596 inline void bal(Label* L) { bal(shifted_branch_offset(L)); } |
| 578 void bc(int32_t offset); | 597 void bc(int32_t offset); |
| 579 void bc(Label* L) { bc(branch_offset(L, false) >> 2); } | 598 inline void bc(Label* L) { bc(shifted_branch_offset26(L)); } |
| 580 void balc(int32_t offset); | 599 void balc(int32_t offset); |
| 581 void balc(Label* L) { balc(branch_offset(L, false) >> 2); } | 600 inline void balc(Label* L) { balc(shifted_branch_offset26(L)); } |
| 582 | 601 |
| 583 void beq(Register rs, Register rt, int16_t offset); | 602 void beq(Register rs, Register rt, int16_t offset); |
| 584 void beq(Register rs, Register rt, Label* L) { | 603 inline void beq(Register rs, Register rt, Label* L) { |
| 585 beq(rs, rt, branch_offset(L, false) >> 2); | 604 beq(rs, rt, shifted_branch_offset(L)); |
| 586 } | 605 } |
| 587 void bgez(Register rs, int16_t offset); | 606 void bgez(Register rs, int16_t offset); |
| 588 void bgezc(Register rt, int16_t offset); | 607 void bgezc(Register rt, int16_t offset); |
| 589 void bgezc(Register rt, Label* L) { | 608 inline void bgezc(Register rt, Label* L) { |
| 590 bgezc(rt, branch_offset_compact(L, false)>>2); | 609 bgezc(rt, shifted_branch_offset(L)); |
| 591 } | 610 } |
| 592 void bgeuc(Register rs, Register rt, int16_t offset); | 611 void bgeuc(Register rs, Register rt, int16_t offset); |
| 593 void bgeuc(Register rs, Register rt, Label* L) { | 612 inline void bgeuc(Register rs, Register rt, Label* L) { |
| 594 bgeuc(rs, rt, branch_offset_compact(L, false)>>2); | 613 bgeuc(rs, rt, shifted_branch_offset(L)); |
| 595 } | 614 } |
| 596 void bgec(Register rs, Register rt, int16_t offset); | 615 void bgec(Register rs, Register rt, int16_t offset); |
| 597 void bgec(Register rs, Register rt, Label* L) { | 616 inline void bgec(Register rs, Register rt, Label* L) { |
| 598 bgec(rs, rt, branch_offset_compact(L, false)>>2); | 617 bgec(rs, rt, shifted_branch_offset(L)); |
| 599 } | 618 } |
| 600 void bgezal(Register rs, int16_t offset); | 619 void bgezal(Register rs, int16_t offset); |
| 601 void bgezalc(Register rt, int16_t offset); | 620 void bgezalc(Register rt, int16_t offset); |
| 602 void bgezalc(Register rt, Label* L) { | 621 inline void bgezalc(Register rt, Label* L) { |
| 603 bgezalc(rt, branch_offset_compact(L, false)>>2); | 622 bgezalc(rt, shifted_branch_offset(L)); |
| 604 } | 623 } |
| 605 void bgezall(Register rs, int16_t offset); | 624 void bgezall(Register rs, int16_t offset); |
| 606 void bgezall(Register rs, Label* L) { | 625 inline void bgezall(Register rs, Label* L) { |
| 607 bgezall(rs, branch_offset(L, false)>>2); | 626 bgezall(rs, branch_offset(L) >> 2); |
| 608 } | 627 } |
| 609 void bgtz(Register rs, int16_t offset); | 628 void bgtz(Register rs, int16_t offset); |
| 610 void bgtzc(Register rt, int16_t offset); | 629 void bgtzc(Register rt, int16_t offset); |
| 611 void bgtzc(Register rt, Label* L) { | 630 inline void bgtzc(Register rt, Label* L) { |
| 612 bgtzc(rt, branch_offset_compact(L, false)>>2); | 631 bgtzc(rt, shifted_branch_offset(L)); |
| 613 } | 632 } |
| 614 void blez(Register rs, int16_t offset); | 633 void blez(Register rs, int16_t offset); |
| 615 void blezc(Register rt, int16_t offset); | 634 void blezc(Register rt, int16_t offset); |
| 616 void blezc(Register rt, Label* L) { | 635 inline void blezc(Register rt, Label* L) { |
| 617 blezc(rt, branch_offset_compact(L, false)>>2); | 636 blezc(rt, shifted_branch_offset(L)); |
| 618 } | 637 } |
| 619 void bltz(Register rs, int16_t offset); | 638 void bltz(Register rs, int16_t offset); |
| 620 void bltzc(Register rt, int16_t offset); | 639 void bltzc(Register rt, int16_t offset); |
| 621 void bltzc(Register rt, Label* L) { | 640 inline void bltzc(Register rt, Label* L) { |
| 622 bltzc(rt, branch_offset_compact(L, false)>>2); | 641 bltzc(rt, shifted_branch_offset(L)); |
| 623 } | 642 } |
| 624 void bltuc(Register rs, Register rt, int16_t offset); | 643 void bltuc(Register rs, Register rt, int16_t offset); |
| 625 void bltuc(Register rs, Register rt, Label* L) { | 644 inline void bltuc(Register rs, Register rt, Label* L) { |
| 626 bltuc(rs, rt, branch_offset_compact(L, false)>>2); | 645 bltuc(rs, rt, shifted_branch_offset(L)); |
| 627 } | 646 } |
| 628 void bltc(Register rs, Register rt, int16_t offset); | 647 void bltc(Register rs, Register rt, int16_t offset); |
| 629 void bltc(Register rs, Register rt, Label* L) { | 648 inline void bltc(Register rs, Register rt, Label* L) { |
| 630 bltc(rs, rt, branch_offset_compact(L, false)>>2); | 649 bltc(rs, rt, shifted_branch_offset(L)); |
| 631 } | 650 } |
| 632 | |
| 633 void bltzal(Register rs, int16_t offset); | 651 void bltzal(Register rs, int16_t offset); |
| 634 void blezalc(Register rt, int16_t offset); | 652 void blezalc(Register rt, int16_t offset); |
| 635 void blezalc(Register rt, Label* L) { | 653 inline void blezalc(Register rt, Label* L) { |
| 636 blezalc(rt, branch_offset_compact(L, false)>>2); | 654 blezalc(rt, shifted_branch_offset(L)); |
| 637 } | 655 } |
| 638 void bltzalc(Register rt, int16_t offset); | 656 void bltzalc(Register rt, int16_t offset); |
| 639 void bltzalc(Register rt, Label* L) { | 657 inline void bltzalc(Register rt, Label* L) { |
| 640 bltzalc(rt, branch_offset_compact(L, false)>>2); | 658 bltzalc(rt, shifted_branch_offset(L)); |
| 641 } | 659 } |
| 642 void bgtzalc(Register rt, int16_t offset); | 660 void bgtzalc(Register rt, int16_t offset); |
| 643 void bgtzalc(Register rt, Label* L) { | 661 inline void bgtzalc(Register rt, Label* L) { |
| 644 bgtzalc(rt, branch_offset_compact(L, false)>>2); | 662 bgtzalc(rt, shifted_branch_offset(L)); |
| 645 } | 663 } |
| 646 void beqzalc(Register rt, int16_t offset); | 664 void beqzalc(Register rt, int16_t offset); |
| 647 void beqzalc(Register rt, Label* L) { | 665 inline void beqzalc(Register rt, Label* L) { |
| 648 beqzalc(rt, branch_offset_compact(L, false)>>2); | 666 beqzalc(rt, shifted_branch_offset(L)); |
| 649 } | 667 } |
| 650 void beqc(Register rs, Register rt, int16_t offset); | 668 void beqc(Register rs, Register rt, int16_t offset); |
| 651 void beqc(Register rs, Register rt, Label* L) { | 669 inline void beqc(Register rs, Register rt, Label* L) { |
| 652 beqc(rs, rt, branch_offset_compact(L, false)>>2); | 670 beqc(rs, rt, shifted_branch_offset(L)); |
| 653 } | 671 } |
| 654 void beqzc(Register rs, int32_t offset); | 672 void beqzc(Register rs, int32_t offset); |
| 655 void beqzc(Register rs, Label* L) { | 673 inline void beqzc(Register rs, Label* L) { |
| 656 beqzc(rs, branch_offset21_compact(L, false)>>2); | 674 beqzc(rs, shifted_branch_offset21(L)); |
| 657 } | 675 } |
| 658 void bnezalc(Register rt, int16_t offset); | 676 void bnezalc(Register rt, int16_t offset); |
| 659 void bnezalc(Register rt, Label* L) { | 677 inline void bnezalc(Register rt, Label* L) { |
| 660 bnezalc(rt, branch_offset_compact(L, false)>>2); | 678 bnezalc(rt, shifted_branch_offset(L)); |
| 661 } | 679 } |
| 662 void bnec(Register rs, Register rt, int16_t offset); | 680 void bnec(Register rs, Register rt, int16_t offset); |
| 663 void bnec(Register rs, Register rt, Label* L) { | 681 inline void bnec(Register rs, Register rt, Label* L) { |
| 664 bnec(rs, rt, branch_offset_compact(L, false)>>2); | 682 bnec(rs, rt, shifted_branch_offset(L)); |
| 665 } | 683 } |
| 666 void bnezc(Register rt, int32_t offset); | 684 void bnezc(Register rt, int32_t offset); |
| 667 void bnezc(Register rt, Label* L) { | 685 inline void bnezc(Register rt, Label* L) { |
| 668 bnezc(rt, branch_offset21_compact(L, false)>>2); | 686 bnezc(rt, shifted_branch_offset21(L)); |
| 669 } | 687 } |
| 670 void bne(Register rs, Register rt, int16_t offset); | 688 void bne(Register rs, Register rt, int16_t offset); |
| 671 void bne(Register rs, Register rt, Label* L) { | 689 inline void bne(Register rs, Register rt, Label* L) { |
| 672 bne(rs, rt, branch_offset(L, false)>>2); | 690 bne(rs, rt, shifted_branch_offset(L)); |
| 673 } | 691 } |
| 674 void bovc(Register rs, Register rt, int16_t offset); | 692 void bovc(Register rs, Register rt, int16_t offset); |
| 675 void bovc(Register rs, Register rt, Label* L) { | 693 inline void bovc(Register rs, Register rt, Label* L) { |
| 676 bovc(rs, rt, branch_offset_compact(L, false)>>2); | 694 bovc(rs, rt, shifted_branch_offset(L)); |
| 677 } | 695 } |
| 678 void bnvc(Register rs, Register rt, int16_t offset); | 696 void bnvc(Register rs, Register rt, int16_t offset); |
| 679 void bnvc(Register rs, Register rt, Label* L) { | 697 inline void bnvc(Register rs, Register rt, Label* L) { |
| 680 bnvc(rs, rt, branch_offset_compact(L, false)>>2); | 698 bnvc(rs, rt, shifted_branch_offset(L)); |
| 681 } | 699 } |
| 682 | 700 |
| 683 // Never use the int16_t b(l)cond version with a branch offset | 701 // Never use the int16_t b(l)cond version with a branch offset |
| 684 // instead of using the Label* version. | 702 // instead of using the Label* version. |
| 685 | 703 |
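Illustrative usage in the same spirit as the Label example near the top of this header (a sketch only; "__" stands for the usual masm-> shorthand used in V8 codegen and is not defined in this file):

    Label done;
    __ beq(a0, zero_reg, &done);  // Label* overload: the assembler computes the offset
    __ nop();                     // pre-r6 branch delay slot
    // ... fall-through path ...
    __ bind(&done);               // forward reference patched here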
| 686 // Jump targets must be in the current 256 MB-aligned region. i.e. 28 bits. | 704 // Jump targets must be in the current 256 MB-aligned region. i.e. 28 bits. |
| 687 void j(int64_t target); | 705 void j(int64_t target); |
| 688 void jal(int64_t target); | 706 void jal(int64_t target); |
| 689 void j(Label* target); | 707 void j(Label* target); |
| 690 void jal(Label* target); | 708 void jal(Label* target); |
| (...skipping 277 matching lines...) |
| 968 void cvt_d_l(FPURegister fd, FPURegister fs); | 986 void cvt_d_l(FPURegister fd, FPURegister fs); |
| 969 void cvt_d_s(FPURegister fd, FPURegister fs); | 987 void cvt_d_s(FPURegister fd, FPURegister fs); |
| 970 | 988 |
| 971 // Conditions and branches for MIPSr6. | 989 // Conditions and branches for MIPSr6. |
| 972 void cmp(FPUCondition cond, SecondaryField fmt, | 990 void cmp(FPUCondition cond, SecondaryField fmt, |
| 973 FPURegister fd, FPURegister ft, FPURegister fs); | 991 FPURegister fd, FPURegister ft, FPURegister fs); |
| 974 void cmp_s(FPUCondition cond, FPURegister fd, FPURegister fs, FPURegister ft); | 992 void cmp_s(FPUCondition cond, FPURegister fd, FPURegister fs, FPURegister ft); |
| 975 void cmp_d(FPUCondition cond, FPURegister fd, FPURegister fs, FPURegister ft); | 993 void cmp_d(FPUCondition cond, FPURegister fd, FPURegister fs, FPURegister ft); |
| 976 | 994 |
| 977 void bc1eqz(int16_t offset, FPURegister ft); | 995 void bc1eqz(int16_t offset, FPURegister ft); |
| 978 void bc1eqz(Label* L, FPURegister ft) { | 996 inline void bc1eqz(Label* L, FPURegister ft) { |
| 979 bc1eqz(branch_offset(L, false)>>2, ft); | 997 bc1eqz(shifted_branch_offset(L), ft); |
| 980 } | 998 } |
| 981 void bc1nez(int16_t offset, FPURegister ft); | 999 void bc1nez(int16_t offset, FPURegister ft); |
| 982 void bc1nez(Label* L, FPURegister ft) { | 1000 inline void bc1nez(Label* L, FPURegister ft) { |
| 983 bc1nez(branch_offset(L, false)>>2, ft); | 1001 bc1nez(shifted_branch_offset(L), ft); |
| 984 } | 1002 } |
| 985 | 1003 |
| 986 // Conditions and branches for non MIPSr6. | 1004 // Conditions and branches for non MIPSr6. |
| 987 void c(FPUCondition cond, SecondaryField fmt, | 1005 void c(FPUCondition cond, SecondaryField fmt, |
| 988 FPURegister ft, FPURegister fs, uint16_t cc = 0); | 1006 FPURegister ft, FPURegister fs, uint16_t cc = 0); |
| 989 void c_s(FPUCondition cond, FPURegister ft, FPURegister fs, uint16_t cc = 0); | 1007 void c_s(FPUCondition cond, FPURegister ft, FPURegister fs, uint16_t cc = 0); |
| 990 void c_d(FPUCondition cond, FPURegister ft, FPURegister fs, uint16_t cc = 0); | 1008 void c_d(FPUCondition cond, FPURegister ft, FPURegister fs, uint16_t cc = 0); |
| 991 | 1009 |
| 992 void bc1f(int16_t offset, uint16_t cc = 0); | 1010 void bc1f(int16_t offset, uint16_t cc = 0); |
| 993 void bc1f(Label* L, uint16_t cc = 0) { | 1011 inline void bc1f(Label* L, uint16_t cc = 0) { |
| 994 bc1f(branch_offset(L, false)>>2, cc); | 1012 bc1f(shifted_branch_offset(L), cc); |
| 995 } | 1013 } |
| 996 void bc1t(int16_t offset, uint16_t cc = 0); | 1014 void bc1t(int16_t offset, uint16_t cc = 0); |
| 997 void bc1t(Label* L, uint16_t cc = 0) { | 1015 inline void bc1t(Label* L, uint16_t cc = 0) { |
| 998 bc1t(branch_offset(L, false)>>2, cc); | 1016 bc1t(shifted_branch_offset(L), cc); |
| 999 } | 1017 } |
| 1000 void fcmp(FPURegister src1, const double src2, FPUCondition cond); | 1018 void fcmp(FPURegister src1, const double src2, FPUCondition cond); |
| 1001 | 1019 |
| 1002 // Check the code size generated from label to here. | 1020 // Check the code size generated from label to here. |
| 1003 int SizeOfCodeGeneratedSince(Label* label) { | 1021 int SizeOfCodeGeneratedSince(Label* label) { |
| 1004 return pc_offset() - label->pos(); | 1022 return pc_offset() - label->pos(); |
| 1005 } | 1023 } |
| 1006 | 1024 |
| 1007 // Check the number of instructions generated from label to here. | 1025 // Check the number of instructions generated from label to here. |
| 1008 int InstructionsGeneratedSince(Label* label) { | 1026 int InstructionsGeneratedSince(Label* label) { |
| (...skipping 100 matching lines...) |
| 1109 static void instr_at_put(byte* pc, Instr instr) { | 1127 static void instr_at_put(byte* pc, Instr instr) { |
| 1110 *reinterpret_cast<Instr*>(pc) = instr; | 1128 *reinterpret_cast<Instr*>(pc) = instr; |
| 1111 } | 1129 } |
| 1112 Instr instr_at(int pos) { return *reinterpret_cast<Instr*>(buffer_ + pos); } | 1130 Instr instr_at(int pos) { return *reinterpret_cast<Instr*>(buffer_ + pos); } |
| 1113 void instr_at_put(int pos, Instr instr) { | 1131 void instr_at_put(int pos, Instr instr) { |
| 1114 *reinterpret_cast<Instr*>(buffer_ + pos) = instr; | 1132 *reinterpret_cast<Instr*>(buffer_ + pos) = instr; |
| 1115 } | 1133 } |
| 1116 | 1134 |
| 1117 // Check if an instruction is a branch of some kind. | 1135 // Check if an instruction is a branch of some kind. |
| 1118 static bool IsBranch(Instr instr); | 1136 static bool IsBranch(Instr instr); |
| 1137 static bool IsBc(Instr instr); |
| 1138 static bool IsBzc(Instr instr); |
| 1139 |
| 1119 static bool IsBeq(Instr instr); | 1140 static bool IsBeq(Instr instr); |
| 1120 static bool IsBne(Instr instr); | 1141 static bool IsBne(Instr instr); |
| 1142 static bool IsBeqzc(Instr instr); |
| 1143 static bool IsBnezc(Instr instr); |
| 1144 static bool IsBeqc(Instr instr); |
| 1145 static bool IsBnec(Instr instr); |
| 1146 |
| 1121 | 1147 |
| 1122 static bool IsJump(Instr instr); | 1148 static bool IsJump(Instr instr); |
| 1123 static bool IsJ(Instr instr); | 1149 static bool IsJ(Instr instr); |
| 1124 static bool IsLui(Instr instr); | 1150 static bool IsLui(Instr instr); |
| 1125 static bool IsOri(Instr instr); | 1151 static bool IsOri(Instr instr); |
| 1126 | 1152 |
| 1127 static bool IsJal(Instr instr); | 1153 static bool IsJal(Instr instr); |
| 1128 static bool IsJr(Instr instr); | 1154 static bool IsJr(Instr instr); |
| 1129 static bool IsJalr(Instr instr); | 1155 static bool IsJalr(Instr instr); |
| 1130 | 1156 |
| (...skipping 38 matching lines...) |
| 1169 | 1195 |
| 1170 void CheckTrampolinePool(); | 1196 void CheckTrampolinePool(); |
| 1171 | 1197 |
| 1172 void PatchConstantPoolAccessInstruction(int pc_offset, int offset, | 1198 void PatchConstantPoolAccessInstruction(int pc_offset, int offset, |
| 1173 ConstantPoolEntry::Access access, | 1199 ConstantPoolEntry::Access access, |
| 1174 ConstantPoolEntry::Type type) { | 1200 ConstantPoolEntry::Type type) { |
| 1175 // No embedded constant pool support. | 1201 // No embedded constant pool support. |
| 1176 UNREACHABLE(); | 1202 UNREACHABLE(); |
| 1177 } | 1203 } |
| 1178 | 1204 |
| 1205 bool IsPrevInstrCompactBranch() { return prev_instr_compact_branch_; } |
| 1206 |
| 1179 protected: | 1207 protected: |
| 1180 // Relocation for a type-recording IC has the AST id added to it. This | 1208 // Relocation for a type-recording IC has the AST id added to it. This |
| 1181 // member variable is a way to pass the information from the call site to | 1209 // member variable is a way to pass the information from the call site to |
| 1182 // the relocation info. | 1210 // the relocation info. |
| 1183 TypeFeedbackId recorded_ast_id_; | 1211 TypeFeedbackId recorded_ast_id_; |
| 1184 | 1212 |
| 1185 inline static void set_target_internal_reference_encoded_at(Address pc, | 1213 inline static void set_target_internal_reference_encoded_at(Address pc, |
| 1186 Address target); | 1214 Address target); |
| 1187 | 1215 |
| 1188 int64_t buffer_space() const { return reloc_info_writer.pos() - pc_; } | 1216 int64_t buffer_space() const { return reloc_info_writer.pos() - pc_; } |
| (...skipping 46 matching lines...) |
| 1235 | 1263 |
| 1236 void EndBlockGrowBuffer() { | 1264 void EndBlockGrowBuffer() { |
| 1237 DCHECK(block_buffer_growth_); | 1265 DCHECK(block_buffer_growth_); |
| 1238 block_buffer_growth_ = false; | 1266 block_buffer_growth_ = false; |
| 1239 } | 1267 } |
| 1240 | 1268 |
| 1241 bool is_buffer_growth_blocked() const { | 1269 bool is_buffer_growth_blocked() const { |
| 1242 return block_buffer_growth_; | 1270 return block_buffer_growth_; |
| 1243 } | 1271 } |
| 1244 | 1272 |
| 1273 void EmitForbiddenSlotInstruction() { |
| 1274 if (IsPrevInstrCompactBranch()) { |
| 1275 nop(); |
| 1276 ClearCompactBranchState(); |
| 1277 } |
| 1278 } |
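Background, as I read this helper: MIPS r6 compact branches have no delay slot, but the slot immediately after one (the forbidden slot) must not contain another control-transfer instruction, so the assembler pads it with a nop when needed. A standalone restatement of the padding rule (sketch, not V8 code):

    #include <cstdint>
    #include <vector>

    // Pad the forbidden slot with a nop (the all-zero word, i.e. sll zero, zero, 0)
    // when the previously emitted instruction was a compact branch.
    void PadForbiddenSlot(std::vector<uint32_t>* code, bool* prev_was_compact_branch) {
      if (*prev_was_compact_branch) {
        code->push_back(0u);
        *prev_was_compact_branch = false;
      }
    }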
| 1279 |
| 1280 inline void CheckTrampolinePoolQuick(int extra_instructions = 0); |
| 1281 |
| 1245 private: | 1282 private: |
| 1246 // Buffer size and constant pool distance are checked together at regular | 1283 // Buffer size and constant pool distance are checked together at regular |
| 1247 // intervals of kBufferCheckInterval emitted bytes. | 1284 // intervals of kBufferCheckInterval emitted bytes. |
| 1248 static const int kBufferCheckInterval = 1*KB/2; | 1285 static const int kBufferCheckInterval = 1*KB/2; |
| 1249 | 1286 |
| 1250 // Code generation. | 1287 // Code generation. |
| 1251 // The relocation writer's position is at least kGap bytes below the end of | 1288 // The relocation writer's position is at least kGap bytes below the end of |
| 1252 // the generated instructions. This is so that multi-instruction sequences do | 1289 // the generated instructions. This is so that multi-instruction sequences do |
| 1253 // not have to check for overflow. The same is true for writes of large | 1290 // not have to check for overflow. The same is true for writes of large |
| 1254 // relocation info entries. | 1291 // relocation info entries. |
| (...skipping 19 matching lines...) |
| 1274 bool block_buffer_growth_; // Block growth when true. | 1311 bool block_buffer_growth_; // Block growth when true. |
| 1275 | 1312 |
| 1276 // Relocation information generation. | 1313 // Relocation information generation. |
| 1277 // Each relocation is encoded as a variable size value. | 1314 // Each relocation is encoded as a variable size value. |
| 1278 static const int kMaxRelocSize = RelocInfoWriter::kMaxSize; | 1315 static const int kMaxRelocSize = RelocInfoWriter::kMaxSize; |
| 1279 RelocInfoWriter reloc_info_writer; | 1316 RelocInfoWriter reloc_info_writer; |
| 1280 | 1317 |
| 1281 // The bound position, before this we cannot do instruction elimination. | 1318 // The bound position, before this we cannot do instruction elimination. |
| 1282 int last_bound_pos_; | 1319 int last_bound_pos_; |
| 1283 | 1320 |
| 1321 // Readable constants for compact branch handling in emit() |
| 1322 enum class CompactBranchType : bool { NO = false, COMPACT_BRANCH = true }; |
| 1323 |
| 1284 // Code emission. | 1324 // Code emission. |
| 1285 inline void CheckBuffer(); | 1325 inline void CheckBuffer(); |
| 1286 void GrowBuffer(); | 1326 void GrowBuffer(); |
| 1287 inline void emit(Instr x); | 1327 inline void emit(Instr x, |
| 1328 CompactBranchType is_compact_branch = CompactBranchType::NO); |
| 1288 inline void emit(uint64_t x); | 1329 inline void emit(uint64_t x); |
| 1289 inline void CheckTrampolinePoolQuick(int extra_instructions = 0); | |
| 1290 | 1330 |
| 1291 // Instruction generation. | 1331 // Instruction generation. |
| 1292 // We have 3 different kind of encoding layout on MIPS. | 1332 // We have 3 different kind of encoding layout on MIPS. |
| 1293 // However due to many different types of objects encoded in the same fields | 1333 // However due to many different types of objects encoded in the same fields |
| 1294 // we have quite a few aliases for each mode. | 1334 // we have quite a few aliases for each mode. |
| 1295 // Using the same structure to refer to Register and FPURegister would spare a | 1335 // Using the same structure to refer to Register and FPURegister would spare a |
| 1296 // few aliases, but mixing both does not look clean to me. | 1336 // few aliases, but mixing both does not look clean to me. |
| 1297 // Anyway we could surely implement this differently. | 1337 // Anyway we could surely implement this differently. |
| 1298 | 1338 |
| 1299 void GenInstrRegister(Opcode opcode, | 1339 void GenInstrRegister(Opcode opcode, |
| (...skipping 31 matching lines...) |
| 1331 FPURegister fd, | 1371 FPURegister fd, |
| 1332 SecondaryField func = NULLSF); | 1372 SecondaryField func = NULLSF); |
| 1333 | 1373 |
| 1334 void GenInstrRegister(Opcode opcode, | 1374 void GenInstrRegister(Opcode opcode, |
| 1335 SecondaryField fmt, | 1375 SecondaryField fmt, |
| 1336 Register rt, | 1376 Register rt, |
| 1337 FPUControlRegister fs, | 1377 FPUControlRegister fs, |
| 1338 SecondaryField func = NULLSF); | 1378 SecondaryField func = NULLSF); |
| 1339 | 1379 |
| 1340 | 1380 |
| 1341 void GenInstrImmediate(Opcode opcode, | 1381 void GenInstrImmediate( |
| 1342 Register rs, | 1382 Opcode opcode, Register rs, Register rt, int32_t j, |
| 1343 Register rt, | 1383 CompactBranchType is_compact_branch = CompactBranchType::NO); |
| 1344 int32_t j); | 1384 void GenInstrImmediate( |
| 1345 void GenInstrImmediate(Opcode opcode, | 1385 Opcode opcode, Register rs, SecondaryField SF, int32_t j, |
| 1346 Register rs, | 1386 CompactBranchType is_compact_branch = CompactBranchType::NO); |
| 1347 SecondaryField SF, | 1387 void GenInstrImmediate( |
| 1348 int32_t j); | 1388 Opcode opcode, Register r1, FPURegister r2, int32_t j, |
| 1349 void GenInstrImmediate(Opcode opcode, | 1389 CompactBranchType is_compact_branch = CompactBranchType::NO); |
| 1350 Register r1, | 1390 void GenInstrImmediate( |
| 1351 FPURegister r2, | 1391 Opcode opcode, Register rs, int32_t offset21, |
| 1352 int32_t j); | 1392 CompactBranchType is_compact_branch = CompactBranchType::NO); |
| 1353 void GenInstrImmediate(Opcode opcode, Register rs, int32_t j); | 1393 void GenInstrImmediate(Opcode opcode, Register rs, uint32_t offset21); |
| 1354 void GenInstrImmediate(Opcode opcode, int32_t offset26); | 1394 void GenInstrImmediate( |
| 1355 | 1395 Opcode opcode, int32_t offset26, |
| 1396 CompactBranchType is_compact_branch = CompactBranchType::NO); |
| 1356 | 1397 |
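Putting the pieces together, a reduced model (my sketch, simplified names) of how the new CompactBranchType default argument is presumably threaded from the GenInstrImmediate() overloads into emit(), keeping the prev_instr_compact_branch_ flag that IsPrevInstrCompactBranch() and EmitForbiddenSlotInstruction() rely on:

    #include <cstdint>
    #include <vector>

    enum class CompactBranchType : bool { NO = false, COMPACT_BRANCH = true };

    struct MiniAssembler {
      std::vector<uint32_t> buffer_;
      bool prev_instr_compact_branch_ = false;

      void emit(uint32_t instr,
                CompactBranchType is_compact_branch = CompactBranchType::NO) {
        buffer_.push_back(instr);
        prev_instr_compact_branch_ =
            (is_compact_branch == CompactBranchType::COMPACT_BRANCH);
      }

      // Immediate-format emitter: compact-branch instructions pass the flag,
      // everything else takes the default and clears the state.
      void GenInstrImmediate(uint32_t encoded,
                             CompactBranchType is_compact_branch = CompactBranchType::NO) {
        emit(encoded, is_compact_branch);
      }
    };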
| 1357 void GenInstrJump(Opcode opcode, | 1398 void GenInstrJump(Opcode opcode, |
| 1358 uint32_t address); | 1399 uint32_t address); |
| 1359 | 1400 |
| 1360 // Helpers. | 1401 // Helpers. |
| 1361 void LoadRegPlusOffsetToAt(const MemOperand& src); | 1402 void LoadRegPlusOffsetToAt(const MemOperand& src); |
| 1362 | 1403 |
| 1363 // Labels. | 1404 // Labels. |
| 1364 void print(Label* L); | 1405 void print(Label* L); |
| 1365 void bind_to(Label* L, int pos); | 1406 void bind_to(Label* L, int pos); |
| (...skipping 53 matching lines...) |
| 1419 int32_t get_trampoline_entry(int32_t pos); | 1460 int32_t get_trampoline_entry(int32_t pos); |
| 1420 int unbound_labels_count_; | 1461 int unbound_labels_count_; |
| 1421 // After trampoline is emitted, long branches are used in generated code for | 1462 // After trampoline is emitted, long branches are used in generated code for |
| 1422 // the forward branches whose target offsets could be beyond reach of branch | 1463 // the forward branches whose target offsets could be beyond reach of branch |
| 1423 // instruction. We use this information to trigger different mode of | 1464 // instruction. We use this information to trigger different mode of |
| 1424 // branch instruction generation, where we use jump instructions rather | 1465 // branch instruction generation, where we use jump instructions rather |
| 1425 // than regular branch instructions. | 1466 // than regular branch instructions. |
| 1426 bool trampoline_emitted_; | 1467 bool trampoline_emitted_; |
| 1427 static const int kTrampolineSlotsSize = 2 * kInstrSize; | 1468 static const int kTrampolineSlotsSize = 2 * kInstrSize; |
| 1428 static const int kMaxBranchOffset = (1 << (18 - 1)) - 1; | 1469 static const int kMaxBranchOffset = (1 << (18 - 1)) - 1; |
| 1470 static const int kMaxCompactBranchOffset = (1 << (28 - 1)) - 1; |
| 1429 static const int kInvalidSlotPos = -1; | 1471 static const int kInvalidSlotPos = -1; |
| 1430 | 1472 |
| 1431 // Internal reference positions, required for unbounded internal reference | 1473 // Internal reference positions, required for unbounded internal reference |
| 1432 // labels. | 1474 // labels. |
| 1433 std::set<int64_t> internal_reference_positions_; | 1475 std::set<int64_t> internal_reference_positions_; |
| 1434 | 1476 |
| 1477 void EmittedCompactBranchInstruction() { prev_instr_compact_branch_ = true; } |
| 1478 void ClearCompactBranchState() { prev_instr_compact_branch_ = false; } |
| 1479 bool prev_instr_compact_branch_ = false; |
| 1480 |
| 1435 Trampoline trampoline_; | 1481 Trampoline trampoline_; |
| 1436 bool internal_trampoline_exception_; | 1482 bool internal_trampoline_exception_; |
| 1437 | 1483 |
| 1438 friend class RegExpMacroAssemblerMIPS; | 1484 friend class RegExpMacroAssemblerMIPS; |
| 1439 friend class RelocInfo; | 1485 friend class RelocInfo; |
| 1440 friend class CodePatcher; | 1486 friend class CodePatcher; |
| 1441 friend class BlockTrampolinePoolScope; | 1487 friend class BlockTrampolinePoolScope; |
| 1442 | 1488 |
| 1443 PositionsRecorder positions_recorder_; | 1489 PositionsRecorder positions_recorder_; |
| 1444 friend class PositionsRecorder; | 1490 friend class PositionsRecorder; |
| 1445 friend class EnsureSpace; | 1491 friend class EnsureSpace; |
| 1446 }; | 1492 }; |
| 1447 | 1493 |
| 1448 | 1494 |
| 1449 class EnsureSpace BASE_EMBEDDED { | 1495 class EnsureSpace BASE_EMBEDDED { |
| 1450 public: | 1496 public: |
| 1451 explicit EnsureSpace(Assembler* assembler) { | 1497 explicit EnsureSpace(Assembler* assembler) { |
| 1452 assembler->CheckBuffer(); | 1498 assembler->CheckBuffer(); |
| 1453 } | 1499 } |
| 1454 }; | 1500 }; |
| 1455 | 1501 |
| 1456 } // namespace internal | 1502 } // namespace internal |
| 1457 } // namespace v8 | 1503 } // namespace v8 |
| 1458 | 1504 |
| 1459 #endif // V8_ARM_ASSEMBLER_MIPS_H_ | 1505 #endif // V8_ARM_ASSEMBLER_MIPS_H_ |