| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
| 6 // met: | 6 // met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 157 matching lines...) |
| 168 ra | 168 ra |
| 169 }; | 169 }; |
| 170 return kRegisters[num]; | 170 return kRegisters[num]; |
| 171 } | 171 } |
| 172 | 172 |
| 173 | 173 |
| 174 // ----------------------------------------------------------------------------- | 174 // ----------------------------------------------------------------------------- |
| 175 // Implementation of RelocInfo. | 175 // Implementation of RelocInfo. |
| 176 | 176 |
| 177 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask | | 177 const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask | |
| 178 1 << RelocInfo::INTERNAL_REFERENCE; | 178 1 << RelocInfo::INTERNAL_REFERENCE | |
| 179 1 << RelocInfo::INTERNAL_REFERENCE_ENCODED; |
| 179 | 180 |
| 180 | 181 |
| 181 bool RelocInfo::IsCodedSpecially() { | 182 bool RelocInfo::IsCodedSpecially() { |
| 182 // The deserializer needs to know whether a pointer is specially coded. Being | 183 // The deserializer needs to know whether a pointer is specially coded. Being |
| 183 // specially coded on MIPS means that it is a lui/ori instruction, and that is | 184 // specially coded on MIPS means that it is a lui/ori instruction, and that is |
| 184 // always the case inside code objects. | 185 // always the case inside code objects. |
| 185 return true; | 186 return true; |
| 186 } | 187 } |
| 187 | 188 |
| 188 | 189 |
| (...skipping 438 matching lines...) |
| 627 DCHECK(IsAddImmediate(instr)); | 628 DCHECK(IsAddImmediate(instr)); |
| 628 return ((instr & ~kImm16Mask) | (offset & kImm16Mask)); | 629 return ((instr & ~kImm16Mask) | (offset & kImm16Mask)); |
| 629 } | 630 } |
| 630 | 631 |
| 631 | 632 |
| 632 bool Assembler::IsAndImmediate(Instr instr) { | 633 bool Assembler::IsAndImmediate(Instr instr) { |
| 633 return GetOpcodeField(instr) == ANDI; | 634 return GetOpcodeField(instr) == ANDI; |
| 634 } | 635 } |
| 635 | 636 |
| 636 | 637 |
| 637 int64_t Assembler::target_at(int64_t pos) { | 638 int64_t Assembler::target_at(int64_t pos, bool is_internal) { |
| 639 if (is_internal) { |
| 640 int64_t* p = reinterpret_cast<int64_t*>(buffer_ + pos); |
| 641 int64_t address = *p; |
| 642 if (address == kEndOfJumpChain) { |
| 643 return kEndOfChain; |
| 644 } else { |
| 645 int64_t instr_address = reinterpret_cast<int64_t>(p); |
| 646 int64_t delta = instr_address - address; |
| 647 DCHECK(pos > delta); |
| 648 return pos - delta; |
| 649 } |
| 650 } |
| 638 Instr instr = instr_at(pos); | 651 Instr instr = instr_at(pos); |
| 639 if ((instr & ~kImm16Mask) == 0) { | 652 if ((instr & ~kImm16Mask) == 0) { |
| 640 // Emitted label constant, not part of a branch. | 653 // Emitted label constant, not part of a branch. |
| 641 if (instr == 0) { | 654 if (instr == 0) { |
| 642 return kEndOfChain; | 655 return kEndOfChain; |
| 643 } else { | 656 } else { |
| 644 int32_t imm18 = ((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14; | 657 int32_t imm18 = ((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14; |
| 645 return (imm18 + pos); | 658 return (imm18 + pos); |
| 646 } | 659 } |
| 647 } | 660 } |
| (...skipping 41 matching lines...) |
| 689 uint64_t instr_address = reinterpret_cast<int64_t>(buffer_ + pos); | 702 uint64_t instr_address = reinterpret_cast<int64_t>(buffer_ + pos); |
| 690 instr_address &= kImm28Mask; | 703 instr_address &= kImm28Mask; |
| 691 int64_t delta = instr_address - imm28; | 704 int64_t delta = instr_address - imm28; |
| 692 DCHECK(pos > delta); | 705 DCHECK(pos > delta); |
| 693 return pos - delta; | 706 return pos - delta; |
| 694 } | 707 } |
| 695 } | 708 } |
| 696 } | 709 } |
| 697 | 710 |
| 698 | 711 |
| 699 void Assembler::target_at_put(int64_t pos, int64_t target_pos) { | 712 void Assembler::target_at_put(int64_t pos, int64_t target_pos, |
| 713 bool is_internal) { |
| 714 if (is_internal) { |
| 715 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
| 716 *reinterpret_cast<uint64_t*>(buffer_ + pos) = imm; |
| 717 return; |
| 718 } |
| 700 Instr instr = instr_at(pos); | 719 Instr instr = instr_at(pos); |
| 701 if ((instr & ~kImm16Mask) == 0) { | 720 if ((instr & ~kImm16Mask) == 0) { |
| 702 DCHECK(target_pos == kEndOfChain || target_pos >= 0); | 721 DCHECK(target_pos == kEndOfChain || target_pos >= 0); |
| 703 // Emitted label constant, not part of a branch. | 722 // Emitted label constant, not part of a branch. |
| 704 // Make label relative to Code* of generated Code object. | 723 // Make label relative to Code* of generated Code object. |
| 705 instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag)); | 724 instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag)); |
| 706 return; | 725 return; |
| 707 } | 726 } |
| 708 | 727 |
| 709 DCHECK(IsBranch(instr) || IsJ(instr) || IsLui(instr)); | 728 DCHECK(IsBranch(instr) || IsJ(instr) || IsLui(instr)); |
| (...skipping 49 matching lines...) |
| 759 Label l = *L; | 778 Label l = *L; |
| 760 PrintF("unbound label"); | 779 PrintF("unbound label"); |
| 761 while (l.is_linked()) { | 780 while (l.is_linked()) { |
| 762 PrintF("@ %d ", l.pos()); | 781 PrintF("@ %d ", l.pos()); |
| 763 Instr instr = instr_at(l.pos()); | 782 Instr instr = instr_at(l.pos()); |
| 764 if ((instr & ~kImm16Mask) == 0) { | 783 if ((instr & ~kImm16Mask) == 0) { |
| 765 PrintF("value\n"); | 784 PrintF("value\n"); |
| 766 } else { | 785 } else { |
| 767 PrintF("%d\n", instr); | 786 PrintF("%d\n", instr); |
| 768 } | 787 } |
| 769 next(&l); | 788 next(&l, internal_reference_positions_.find(l.pos()) != |
| 789 internal_reference_positions_.end()); |
| 770 } | 790 } |
| 771 } else { | 791 } else { |
| 772 PrintF("label in inconsistent state (pos = %d)\n", L->pos_); | 792 PrintF("label in inconsistent state (pos = %d)\n", L->pos_); |
| 773 } | 793 } |
| 774 } | 794 } |
| 775 | 795 |
| 776 | 796 |
| 777 void Assembler::bind_to(Label* L, int pos) { | 797 void Assembler::bind_to(Label* L, int pos) { |
| 778 DCHECK(0 <= pos && pos <= pc_offset()); // Must have valid binding position. | 798 DCHECK(0 <= pos && pos <= pc_offset()); // Must have valid binding position. |
| 779 int32_t trampoline_pos = kInvalidSlotPos; | 799 int32_t trampoline_pos = kInvalidSlotPos; |
| 800 bool is_internal = false; |
| 780 if (L->is_linked() && !trampoline_emitted_) { | 801 if (L->is_linked() && !trampoline_emitted_) { |
| 781 unbound_labels_count_--; | 802 unbound_labels_count_--; |
| 782 next_buffer_check_ += kTrampolineSlotsSize; | 803 next_buffer_check_ += kTrampolineSlotsSize; |
| 783 } | 804 } |
| 784 | 805 |
| 785 while (L->is_linked()) { | 806 while (L->is_linked()) { |
| 786 int32_t fixup_pos = L->pos(); | 807 int32_t fixup_pos = L->pos(); |
| 787 int32_t dist = pos - fixup_pos; | 808 int32_t dist = pos - fixup_pos; |
| 788 next(L); // Call next before overwriting link with target at fixup_pos. | 809 is_internal = internal_reference_positions_.find(fixup_pos) != |
| 810 internal_reference_positions_.end(); |
| 811 next(L, is_internal); // Call next before overwriting link with target at |
| 812 // fixup_pos. |
| 789 Instr instr = instr_at(fixup_pos); | 813 Instr instr = instr_at(fixup_pos); |
| 790 if (IsBranch(instr)) { | 814 if (is_internal) { |
| 815 target_at_put(fixup_pos, pos, is_internal); |
| 816 } else if (IsBranch(instr)) { |
| 791 if (dist > kMaxBranchOffset) { | 817 if (dist > kMaxBranchOffset) { |
| 792 if (trampoline_pos == kInvalidSlotPos) { | 818 if (trampoline_pos == kInvalidSlotPos) { |
| 793 trampoline_pos = get_trampoline_entry(fixup_pos); | 819 trampoline_pos = get_trampoline_entry(fixup_pos); |
| 794 CHECK(trampoline_pos != kInvalidSlotPos); | 820 CHECK(trampoline_pos != kInvalidSlotPos); |
| 795 } | 821 } |
| 796 DCHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset); | 822 DCHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset); |
| 797 target_at_put(fixup_pos, trampoline_pos); | 823 target_at_put(fixup_pos, trampoline_pos, false); |
| 798 fixup_pos = trampoline_pos; | 824 fixup_pos = trampoline_pos; |
| 799 dist = pos - fixup_pos; | 825 dist = pos - fixup_pos; |
| 800 } | 826 } |
| 801 target_at_put(fixup_pos, pos); | 827 target_at_put(fixup_pos, pos, false); |
| 802 } else { | 828 } else { |
| 803 DCHECK(IsJ(instr) || IsLui(instr) || IsEmittedConstant(instr)); | 829 DCHECK(IsJ(instr) || IsLui(instr) || IsEmittedConstant(instr)); |
| 804 target_at_put(fixup_pos, pos); | 830 target_at_put(fixup_pos, pos, false); |
| 805 } | 831 } |
| 806 } | 832 } |
| 807 L->bind_to(pos); | 833 L->bind_to(pos); |
| 808 | 834 |
| 809 // Keep track of the last bound label so we don't eliminate any instructions | 835 // Keep track of the last bound label so we don't eliminate any instructions |
| 810 // before a bound label. | 836 // before a bound label. |
| 811 if (pos > last_bound_pos_) | 837 if (pos > last_bound_pos_) |
| 812 last_bound_pos_ = pos; | 838 last_bound_pos_ = pos; |
| 813 } | 839 } |
| 814 | 840 |
| 815 | 841 |
| 816 void Assembler::bind(Label* L) { | 842 void Assembler::bind(Label* L) { |
| 817 DCHECK(!L->is_bound()); // Label can only be bound once. | 843 DCHECK(!L->is_bound()); // Label can only be bound once. |
| 818 bind_to(L, pc_offset()); | 844 bind_to(L, pc_offset()); |
| 819 } | 845 } |
| 820 | 846 |
| 821 | 847 |
| 822 void Assembler::next(Label* L) { | 848 void Assembler::next(Label* L, bool is_internal) { |
| 823 DCHECK(L->is_linked()); | 849 DCHECK(L->is_linked()); |
| 824 int link = target_at(L->pos()); | 850 int link = target_at(L->pos(), is_internal); |
| 825 if (link == kEndOfChain) { | 851 if (link == kEndOfChain) { |
| 826 L->Unuse(); | 852 L->Unuse(); |
| 827 } else { | 853 } else { |
| 828 DCHECK(link >= 0); | 854 DCHECK(link >= 0); |
| 829 L->link_to(link); | 855 L->link_to(link); |
| 830 } | 856 } |
| 831 } | 857 } |
| 832 | 858 |
| 833 | 859 |
| 834 bool Assembler::is_near(Label* L) { | 860 bool Assembler::is_near(Label* L) { |
| (...skipping 1717 matching lines...) |
| 2552 | 2578 |
| 2553 | 2579 |
| 2554 void Assembler::bc1t(int16_t offset, uint16_t cc) { | 2580 void Assembler::bc1t(int16_t offset, uint16_t cc) { |
| 2555 DCHECK(is_uint3(cc)); | 2581 DCHECK(is_uint3(cc)); |
| 2556 Instr instr = COP1 | BC1 | cc << 18 | 1 << 16 | (offset & kImm16Mask); | 2582 Instr instr = COP1 | BC1 | cc << 18 | 1 << 16 | (offset & kImm16Mask); |
| 2557 emit(instr); | 2583 emit(instr); |
| 2558 } | 2584 } |
| 2559 | 2585 |
| 2560 | 2586 |
| 2561 // Debugging. | 2587 // Debugging. |
| 2562 int Assembler::RelocateInternalReference(byte* pc, intptr_t pc_delta) { | 2588 int Assembler::RelocateInternalReference(RelocInfo::Mode rmode, byte* pc, |
| 2589 intptr_t pc_delta) { |
| 2590 if (RelocInfo::IsInternalReference(rmode)) { |
| 2591 int64_t* p = reinterpret_cast<int64_t*>(pc); |
| 2592 if (*p == kEndOfJumpChain) { |
| 2593 return 0; // Number of instructions patched. |
| 2594 } |
| 2595 *p += pc_delta; |
| 2596 return 2; // Number of instructions patched. |
| 2597 } |
| 2563 Instr instr = instr_at(pc); | 2598 Instr instr = instr_at(pc); |
| 2599 DCHECK(RelocInfo::IsInternalReferenceEncoded(rmode)); |
| 2564 DCHECK(IsJ(instr) || IsLui(instr)); | 2600 DCHECK(IsJ(instr) || IsLui(instr)); |
| 2565 if (IsLui(instr)) { | 2601 if (IsLui(instr)) { |
| 2566 Instr instr_lui = instr_at(pc + 0 * Assembler::kInstrSize); | 2602 Instr instr_lui = instr_at(pc + 0 * Assembler::kInstrSize); |
| 2567 Instr instr_ori = instr_at(pc + 1 * Assembler::kInstrSize); | 2603 Instr instr_ori = instr_at(pc + 1 * Assembler::kInstrSize); |
| 2568 Instr instr_ori2 = instr_at(pc + 3 * Assembler::kInstrSize); | 2604 Instr instr_ori2 = instr_at(pc + 3 * Assembler::kInstrSize); |
| 2569 DCHECK(IsOri(instr_ori)); | 2605 DCHECK(IsOri(instr_ori)); |
| 2570 DCHECK(IsOri(instr_ori2)); | 2606 DCHECK(IsOri(instr_ori2)); |
| 2571 // TODO(plind): symbolic names for the shifts. | 2607 // TODO(plind): symbolic names for the shifts. |
| 2572 int64_t imm = (instr_lui & static_cast<int64_t>(kImm16Mask)) << 48; | 2608 int64_t imm = (instr_lui & static_cast<int64_t>(kImm16Mask)) << 48; |
| 2573 imm |= (instr_ori & static_cast<int64_t>(kImm16Mask)) << 32; | 2609 imm |= (instr_ori & static_cast<int64_t>(kImm16Mask)) << 32; |
| (...skipping 68 matching lines...) |
| 2642 DeleteArray(buffer_); | 2678 DeleteArray(buffer_); |
| 2643 buffer_ = desc.buffer; | 2679 buffer_ = desc.buffer; |
| 2644 buffer_size_ = desc.buffer_size; | 2680 buffer_size_ = desc.buffer_size; |
| 2645 pc_ += pc_delta; | 2681 pc_ += pc_delta; |
| 2646 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, | 2682 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, |
| 2647 reloc_info_writer.last_pc() + pc_delta); | 2683 reloc_info_writer.last_pc() + pc_delta); |
| 2648 | 2684 |
| 2649 // Relocate runtime entries. | 2685 // Relocate runtime entries. |
| 2650 for (RelocIterator it(desc); !it.done(); it.next()) { | 2686 for (RelocIterator it(desc); !it.done(); it.next()) { |
| 2651 RelocInfo::Mode rmode = it.rinfo()->rmode(); | 2687 RelocInfo::Mode rmode = it.rinfo()->rmode(); |
| 2652 if (rmode == RelocInfo::INTERNAL_REFERENCE) { | 2688 if (rmode == RelocInfo::INTERNAL_REFERENCE_ENCODED || |
| 2689 rmode == RelocInfo::INTERNAL_REFERENCE) { |
| 2653 byte* p = reinterpret_cast<byte*>(it.rinfo()->pc()); | 2690 byte* p = reinterpret_cast<byte*>(it.rinfo()->pc()); |
| 2654 RelocateInternalReference(p, pc_delta); | 2691 RelocateInternalReference(rmode, p, pc_delta); |
| 2655 } | 2692 } |
| 2656 } | 2693 } |
| 2657 | |
| 2658 DCHECK(!overflow()); | 2694 DCHECK(!overflow()); |
| 2659 } | 2695 } |
| 2660 | 2696 |
| 2661 | 2697 |
| 2662 void Assembler::db(uint8_t data) { | 2698 void Assembler::db(uint8_t data) { |
| 2663 CheckBuffer(); | 2699 CheckBuffer(); |
| 2664 *reinterpret_cast<uint8_t*>(pc_) = data; | 2700 *reinterpret_cast<uint8_t*>(pc_) = data; |
| 2665 pc_ += sizeof(uint8_t); | 2701 pc_ += sizeof(uint8_t); |
| 2666 } | 2702 } |
| 2667 | 2703 |
| 2668 | 2704 |
| 2669 void Assembler::dd(uint32_t data) { | 2705 void Assembler::dd(uint32_t data) { |
| 2670 CheckBuffer(); | 2706 CheckBuffer(); |
| 2671 *reinterpret_cast<uint32_t*>(pc_) = data; | 2707 *reinterpret_cast<uint32_t*>(pc_) = data; |
| 2672 pc_ += sizeof(uint32_t); | 2708 pc_ += sizeof(uint32_t); |
| 2673 } | 2709 } |
| 2674 | 2710 |
| 2675 | 2711 |
| 2712 void Assembler::dd(Label* label) { |
| 2713 CheckBuffer(); |
| 2714 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); |
| 2715 if (label->is_bound()) { |
| 2716 uint64_t data = reinterpret_cast<uint64_t>(buffer_ + label->pos()); |
| 2717 *reinterpret_cast<uint64_t*>(pc_) = data; |
| 2718 pc_ += sizeof(uint64_t); |
| 2719 } else { |
| 2720 uint64_t target_pos = jump_address(label); |
| 2721 emit(target_pos); |
| 2722 internal_reference_positions_.insert(label->pos()); |
| 2723 } |
| 2724 } |
| 2725 |
| 2726 |
| 2676 void Assembler::emit_code_stub_address(Code* stub) { | 2727 void Assembler::emit_code_stub_address(Code* stub) { |
| 2677 CheckBuffer(); | 2728 CheckBuffer(); |
| 2678 *reinterpret_cast<uint64_t*>(pc_) = | 2729 *reinterpret_cast<uint64_t*>(pc_) = |
| 2679 reinterpret_cast<uint64_t>(stub->instruction_start()); | 2730 reinterpret_cast<uint64_t>(stub->instruction_start()); |
| 2680 pc_ += sizeof(uint64_t); | 2731 pc_ += sizeof(uint64_t); |
| 2681 } | 2732 } |
| 2682 | 2733 |
| 2683 | 2734 |
| 2684 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 2735 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
| 2685 // We do not try to reuse pool constants. | 2736 // We do not try to reuse pool constants. |
| (...skipping 60 matching lines...) |
| 2746 nop(); | 2797 nop(); |
| 2747 | 2798 |
| 2748 int pool_start = pc_offset(); | 2799 int pool_start = pc_offset(); |
| 2749 for (int i = 0; i < unbound_labels_count_; i++) { | 2800 for (int i = 0; i < unbound_labels_count_; i++) { |
| 2750 uint64_t imm64; | 2801 uint64_t imm64; |
| 2751 imm64 = jump_address(&after_pool); | 2802 imm64 = jump_address(&after_pool); |
| 2752 { BlockGrowBufferScope block_buf_growth(this); | 2803 { BlockGrowBufferScope block_buf_growth(this); |
| 2753 // Buffer growth (and relocation) must be blocked for internal | 2804 // Buffer growth (and relocation) must be blocked for internal |
| 2754 // references until associated instructions are emitted and available | 2805 // references until associated instructions are emitted and available |
| 2755 // to be patched. | 2806 // to be patched. |
| 2756 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); | 2807 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
| 2757 // TODO(plind): Verify this, presume I cannot use macro-assembler | 2808 // TODO(plind): Verify this, presume I cannot use macro-assembler |
| 2758 // here. | 2809 // here. |
| 2759 lui(at, (imm64 >> 32) & kImm16Mask); | 2810 lui(at, (imm64 >> 32) & kImm16Mask); |
| 2760 ori(at, at, (imm64 >> 16) & kImm16Mask); | 2811 ori(at, at, (imm64 >> 16) & kImm16Mask); |
| 2761 dsll(at, at, 16); | 2812 dsll(at, at, 16); |
| 2762 ori(at, at, imm64 & kImm16Mask); | 2813 ori(at, at, imm64 & kImm16Mask); |
| 2763 } | 2814 } |
| 2764 jr(at); | 2815 jr(at); |
| 2765 nop(); | 2816 nop(); |
| 2766 } | 2817 } |
| (...skipping 143 matching lines...) |
| 2910 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { | 2961 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { |
| 2911 // No out-of-line constant pool support. | 2962 // No out-of-line constant pool support. |
| 2912 DCHECK(!FLAG_enable_ool_constant_pool); | 2963 DCHECK(!FLAG_enable_ool_constant_pool); |
| 2913 return; | 2964 return; |
| 2914 } | 2965 } |
| 2915 | 2966 |
| 2916 | 2967 |
| 2917 } } // namespace v8::internal | 2968 } } // namespace v8::internal |
| 2918 | 2969 |
| 2919 #endif // V8_TARGET_ARCH_MIPS64 | 2970 #endif // V8_TARGET_ARCH_MIPS64 |