| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
| 6 // met: | 6 // met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 619 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 630 if ((instr & ~kImm16Mask) == 0) { | 630 if ((instr & ~kImm16Mask) == 0) { |
| 631 // Emitted label constant, not part of a branch. | 631 // Emitted label constant, not part of a branch. |
| 632 if (instr == 0) { | 632 if (instr == 0) { |
| 633 return kEndOfChain; | 633 return kEndOfChain; |
| 634 } else { | 634 } else { |
| 635 int32_t imm18 =((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14; | 635 int32_t imm18 = ((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14; |
| 636 return (imm18 + pos); | 636 return (imm18 + pos); |
| 637 } | 637 } |
| 638 } | 638 } |
| 639 // Check we have a branch or jump instruction. | 639 // Check we have a branch or jump instruction. |
| 640 DCHECK(IsBranch(instr) || IsLui(instr)); | 640 DCHECK(IsBranch(instr) || IsJ(instr) || IsJal(instr) || IsLui(instr)); |
| 641 // Do NOT change this to <<2. We rely on arithmetic shifts here, assuming | 641 // Do NOT change this to <<2. We rely on arithmetic shifts here, assuming |
| 642 // the compiler uses arithmetic shifts for signed integers. | 642 // the compiler uses arithmetic shifts for signed integers. |
| 643 if (IsBranch(instr)) { | 643 if (IsBranch(instr)) { |
| 644 int32_t imm18 = ((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14; | 644 int32_t imm18 = ((instr & static_cast<int32_t>(kImm16Mask)) << 16) >> 14; |
| 645 if (imm18 == kEndOfChain) { | 645 if (imm18 == kEndOfChain) { |
| 646 // EndOfChain sentinel is returned directly, not relative to pc or pos. | 646 // EndOfChain sentinel is returned directly, not relative to pc or pos. |
| 647 return kEndOfChain; | 647 return kEndOfChain; |
| 648 } else { | 648 } else { |
| 649 return pos + kBranchPCOffset + imm18; | 649 return pos + kBranchPCOffset + imm18; |
| 650 } | 650 } |
| (...skipping 15 matching lines...) Expand all Loading... |
| 666 // EndOfChain sentinel is returned directly, not relative to pc or pos. | 666 // EndOfChain sentinel is returned directly, not relative to pc or pos. |
| 667 return kEndOfChain; | 667 return kEndOfChain; |
| 668 } else { | 668 } else { |
| 669 uint64_t instr_address = reinterpret_cast<int64_t>(buffer_ + pos); | 669 uint64_t instr_address = reinterpret_cast<uint64_t>(buffer_ + pos); |
| 670 DCHECK(instr_address - imm < INT_MAX); | 670 DCHECK(instr_address - imm < INT_MAX); |
| 671 int delta = static_cast<int>(instr_address - imm); | 671 int delta = static_cast<int>(instr_address - imm); |
| 672 DCHECK(pos > delta); | 672 DCHECK(pos > delta); |
| 673 return pos - delta; | 673 return pos - delta; |
| 674 } | 674 } |
| 675 } else { | 675 } else { |
| 676 UNREACHABLE(); | 676 DCHECK(IsJ(instr) || IsJal(instr)); |
| 677 return 0; | 677 int32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; |
| 678 if (imm28 == kEndOfJumpChain) { |
| 679 // EndOfChain sentinel is returned directly, not relative to pc or pos. |
| 680 return kEndOfChain; |
| 681 } else { |
| 682 uint64_t instr_address = reinterpret_cast<uint64_t>(buffer_ + pos); |
| 683 instr_address &= kImm28Mask; |
| 684 int delta = static_cast<int>(instr_address - imm28); |
| 685 DCHECK(pos > delta); |
| 686 return pos - delta; |
| 687 } |
| 678 } | 688 } |
| 679 } | 689 } |
| 680 | 690 |
| 681 | 691 |
| 682 void Assembler::target_at_put(int pos, int target_pos, bool is_internal) { | 692 void Assembler::target_at_put(int pos, int target_pos, bool is_internal) { |
| 683 if (is_internal) { | 693 if (is_internal) { |
| 684 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; | 694 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
| 685 *reinterpret_cast<uint64_t*>(buffer_ + pos) = imm; | 695 *reinterpret_cast<uint64_t*>(buffer_ + pos) = imm; |
| 686 return; | 696 return; |
| 687 } | 697 } |
| 688 Instr instr = instr_at(pos); | 698 Instr instr = instr_at(pos); |
| 689 if ((instr & ~kImm16Mask) == 0) { | 699 if ((instr & ~kImm16Mask) == 0) { |
| 690 DCHECK(target_pos == kEndOfChain || target_pos >= 0); | 700 DCHECK(target_pos == kEndOfChain || target_pos >= 0); |
| 691 // Emitted label constant, not part of a branch. | 701 // Emitted label constant, not part of a branch. |
| 692 // Make label relative to Code* of generated Code object. | 702 // Make label relative to Code* of generated Code object. |
| 693 instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag)); | 703 instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag)); |
| 694 return; | 704 return; |
| 695 } | 705 } |
| 696 | 706 |
| 697 DCHECK(IsBranch(instr) || IsLui(instr)); | 707 DCHECK(IsBranch(instr) || IsJ(instr) || IsJal(instr) || IsLui(instr)); |
| 698 if (IsBranch(instr)) { | 708 if (IsBranch(instr)) { |
| 699 int32_t imm18 = target_pos - (pos + kBranchPCOffset); | 709 int32_t imm18 = target_pos - (pos + kBranchPCOffset); |
| 700 DCHECK((imm18 & 3) == 0); | 710 DCHECK((imm18 & 3) == 0); |
| 701 | 711 |
| 702 instr &= ~kImm16Mask; | 712 instr &= ~kImm16Mask; |
| 703 int32_t imm16 = imm18 >> 2; | 713 int32_t imm16 = imm18 >> 2; |
| 704 DCHECK(is_int16(imm16)); | 714 DCHECK(is_int16(imm16)); |
| 705 | 715 |
| 706 instr_at_put(pos, instr | (imm16 & kImm16Mask)); | 716 instr_at_put(pos, instr | (imm16 & kImm16Mask)); |
| 707 } else if (IsLui(instr)) { | 717 } else if (IsLui(instr)) { |
| (...skipping 10 matching lines...) Expand all Loading... |
| 718 instr_ori &= ~kImm16Mask; | 728 instr_ori &= ~kImm16Mask; |
| 719 instr_ori2 &= ~kImm16Mask; | 729 instr_ori2 &= ~kImm16Mask; |
| 720 | 730 |
| 721 instr_at_put(pos + 0 * Assembler::kInstrSize, | 731 instr_at_put(pos + 0 * Assembler::kInstrSize, |
| 722 instr_lui | ((imm >> 32) & kImm16Mask)); | 732 instr_lui | ((imm >> 32) & kImm16Mask)); |
| 723 instr_at_put(pos + 1 * Assembler::kInstrSize, | 733 instr_at_put(pos + 1 * Assembler::kInstrSize, |
| 724 instr_ori | ((imm >> 16) & kImm16Mask)); | 734 instr_ori | ((imm >> 16) & kImm16Mask)); |
| 725 instr_at_put(pos + 3 * Assembler::kInstrSize, | 735 instr_at_put(pos + 3 * Assembler::kInstrSize, |
| 726 instr_ori2 | (imm & kImm16Mask)); | 736 instr_ori2 | (imm & kImm16Mask)); |
| 727 } else { | 737 } else { |
| 728 UNREACHABLE(); | 738 DCHECK(IsJ(instr) || IsJal(instr)); |
| 739 uint64_t imm28 = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
| 740 imm28 &= kImm28Mask; |
| 741 DCHECK((imm28 & 3) == 0); |
| 742 |
| 743 instr &= ~kImm26Mask; |
| 744 uint32_t imm26 = imm28 >> 2; |
| 745 DCHECK(is_uint26(imm26)); |
| 746 |
| 747 instr_at_put(pos, instr | (imm26 & kImm26Mask)); |
| 729 } | 748 } |
| 730 } | 749 } |
| 731 | 750 |
| 732 | 751 |
| 733 void Assembler::print(Label* L) { | 752 void Assembler::print(Label* L) { |
| 734 if (L->is_unused()) { | 753 if (L->is_unused()) { |
| 735 PrintF("unused label\n"); | 754 PrintF("unused label\n"); |
| 736 } else if (L->is_bound()) { | 755 } else if (L->is_bound()) { |
| 737 PrintF("bound label to %d\n", L->pos()); | 756 PrintF("bound label to %d\n", L->pos()); |
| 738 } else if (L->is_linked()) { | 757 } else if (L->is_linked()) { |
| (...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 780 trampoline_pos = get_trampoline_entry(fixup_pos); | 799 trampoline_pos = get_trampoline_entry(fixup_pos); |
| 781 CHECK(trampoline_pos != kInvalidSlotPos); | 800 CHECK(trampoline_pos != kInvalidSlotPos); |
| 782 } | 801 } |
| 783 DCHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset); | 802 DCHECK((trampoline_pos - fixup_pos) <= kMaxBranchOffset); |
| 784 target_at_put(fixup_pos, trampoline_pos, false); | 803 target_at_put(fixup_pos, trampoline_pos, false); |
| 785 fixup_pos = trampoline_pos; | 804 fixup_pos = trampoline_pos; |
| 786 dist = pos - fixup_pos; | 805 dist = pos - fixup_pos; |
| 787 } | 806 } |
| 788 target_at_put(fixup_pos, pos, false); | 807 target_at_put(fixup_pos, pos, false); |
| 789 } else { | 808 } else { |
| 790 DCHECK(IsJ(instr) || IsLui(instr) || IsEmittedConstant(instr)); | 809 DCHECK(IsJ(instr) || IsJal(instr) || IsLui(instr) || |
| 810 IsEmittedConstant(instr)); |
| 791 target_at_put(fixup_pos, pos, false); | 811 target_at_put(fixup_pos, pos, false); |
| 792 } | 812 } |
| 793 } | 813 } |
| 794 L->bind_to(pos); | 814 L->bind_to(pos); |
| 795 | 815 |
| 796 // Keep track of the last bound label so we don't eliminate any instructions | 816 // Keep track of the last bound label so we don't eliminate any instructions |
| 797 // before a bound label. | 817 // before a bound label. |
| 798 if (pos > last_bound_pos_) | 818 if (pos > last_bound_pos_) |
| 799 last_bound_pos_ = pos; | 819 last_bound_pos_ = pos; |
| 800 } | 820 } |
| (...skipping 176 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 977 target_pos = L->pos(); | 997 target_pos = L->pos(); |
| 978 } else { | 998 } else { |
| 979 if (L->is_linked()) { | 999 if (L->is_linked()) { |
| 980 target_pos = L->pos(); // L's link. | 1000 target_pos = L->pos(); // L's link. |
| 981 L->link_to(pc_offset()); | 1001 L->link_to(pc_offset()); |
| 982 } else { | 1002 } else { |
| 983 L->link_to(pc_offset()); | 1003 L->link_to(pc_offset()); |
| 984 return kEndOfJumpChain; | 1004 return kEndOfJumpChain; |
| 985 } | 1005 } |
| 986 } | 1006 } |
| 987 | |
| 988 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; | 1007 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
| 989 DCHECK((imm & 3) == 0); | 1008 DCHECK((imm & 3) == 0); |
| 990 | 1009 |
| 991 return imm; | 1010 return imm; |
| 992 } | 1011 } |
| 993 | 1012 |
| 994 | 1013 |
| 995 int32_t Assembler::branch_offset(Label* L, bool jump_elimination_allowed) { | 1014 int32_t Assembler::branch_offset(Label* L, bool jump_elimination_allowed) { |
| 996 int32_t target_pos; | 1015 int32_t target_pos; |
| 997 if (L->is_bound()) { | 1016 if (L->is_bound()) { |
| (...skipping 354 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1352 DCHECK(kArchVariant == kMips64r6); | 1371 DCHECK(kArchVariant == kMips64r6); |
| 1353 DCHECK(!(rs.is(zero_reg))); | 1372 DCHECK(!(rs.is(zero_reg))); |
| 1354 Instr instr = BNEZC | (rs.code() << kRsShift) | offset; | 1373 Instr instr = BNEZC | (rs.code() << kRsShift) | offset; |
| 1355 emit(instr); | 1374 emit(instr); |
| 1356 } | 1375 } |
| 1357 | 1376 |
| 1358 | 1377 |
| 1359 void Assembler::j(int64_t target) { | 1378 void Assembler::j(int64_t target) { |
| 1360 #if DEBUG | 1379 #ifdef DEBUG |
| 1361 // Get pc of delay slot. | 1380 // Get pc of delay slot. |
| 1362 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); | 1381 if (target != kEndOfJumpChain) { |
| 1363 bool in_range = (ipc ^ static_cast<uint64_t>(target) >> | 1382 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); |
| 1364 (kImm26Bits + kImmFieldShift)) == 0; | 1383 bool in_range = ((ipc ^ static_cast<uint64_t>(target)) >> |
| 1365 DCHECK(in_range && ((target & 3) == 0)); | 1384 (kImm26Bits + kImmFieldShift)) == 0; |
| 1385 DCHECK(in_range && ((target & 3) == 0)); |
| 1386 } |
| 1366 #endif | 1387 #endif |
| 1367 GenInstrJump(J, target >> 2); | 1388 GenInstrJump(J, (target >> 2) & kImm26Mask); |
| 1368 } | 1389 } |
| 1369 | 1390 |
| 1370 | 1391 |
| 1371 void Assembler::jr(Register rs) { | 1392 void Assembler::jr(Register rs) { |
| 1372 if (kArchVariant != kMips64r6) { | 1393 if (kArchVariant != kMips64r6) { |
| 1373 BlockTrampolinePoolScope block_trampoline_pool(this); | 1394 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1374 if (rs.is(ra)) { | 1395 if (rs.is(ra)) { |
| 1375 positions_recorder()->WriteRecordedPositions(); | 1396 positions_recorder()->WriteRecordedPositions(); |
| 1376 } | 1397 } |
| 1377 GenInstrRegister(SPECIAL, rs, zero_reg, zero_reg, 0, JR); | 1398 GenInstrRegister(SPECIAL, rs, zero_reg, zero_reg, 0, JR); |
| 1378 BlockTrampolinePoolFor(1); // For associated delay slot. | 1399 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1379 } else { | 1400 } else { |
| 1380 jalr(rs, zero_reg); | 1401 jalr(rs, zero_reg); |
| 1381 } | 1402 } |
| 1382 } | 1403 } |
| 1383 | 1404 |
| 1384 | 1405 |
| 1385 void Assembler::jal(int64_t target) { | 1406 void Assembler::jal(int64_t target) { |
| 1386 #ifdef DEBUG | 1407 #ifdef DEBUG |
| 1387 // Get pc of delay slot. | 1408 // Get pc of delay slot. |
| 1388 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); | 1409 if (target != kEndOfJumpChain) { |
| 1389 bool in_range = (ipc ^ static_cast<uint64_t>(target) >> | 1410 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); |
| 1390 (kImm26Bits + kImmFieldShift)) == 0; | 1411 bool in_range = ((ipc ^ static_cast<uint64_t>(target)) >> |
| 1391 DCHECK(in_range && ((target & 3) == 0)); | 1412 (kImm26Bits + kImmFieldShift)) == 0; |
| 1413 DCHECK(in_range && ((target & 3) == 0)); |
| 1414 } |
| 1392 #endif | 1415 #endif |
| 1393 positions_recorder()->WriteRecordedPositions(); | 1416 positions_recorder()->WriteRecordedPositions(); |
| 1394 GenInstrJump(JAL, target >> 2); | 1417 GenInstrJump(JAL, (target >> 2) & kImm26Mask); |
| 1395 } | 1418 } |
| 1396 | 1419 |
| 1397 | 1420 |
| 1398 void Assembler::jalr(Register rs, Register rd) { | 1421 void Assembler::jalr(Register rs, Register rd) { |
| 1399 BlockTrampolinePoolScope block_trampoline_pool(this); | 1422 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1400 positions_recorder()->WriteRecordedPositions(); | 1423 positions_recorder()->WriteRecordedPositions(); |
| 1401 GenInstrRegister(SPECIAL, rs, zero_reg, rd, 0, JALR); | 1424 GenInstrRegister(SPECIAL, rs, zero_reg, rd, 0, JALR); |
| 1402 BlockTrampolinePoolFor(1); // For associated delay slot. | 1425 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1403 } | 1426 } |
| 1404 | 1427 |
| (...skipping 1352 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2757 if (RelocInfo::IsInternalReference(rmode)) { | 2780 if (RelocInfo::IsInternalReference(rmode)) { |
| 2758 int64_t* p = reinterpret_cast<int64_t*>(pc); | 2781 int64_t* p = reinterpret_cast<int64_t*>(pc); |
| 2759 if (*p == kEndOfJumpChain) { | 2782 if (*p == kEndOfJumpChain) { |
| 2760 return 0; // Number of instructions patched. | 2783 return 0; // Number of instructions patched. |
| 2761 } | 2784 } |
| 2762 *p += pc_delta; | 2785 *p += pc_delta; |
| 2763 return 2; // Number of instructions patched. | 2786 return 2; // Number of instructions patched. |
| 2764 } | 2787 } |
| 2765 Instr instr = instr_at(pc); | 2788 Instr instr = instr_at(pc); |
| 2766 DCHECK(RelocInfo::IsInternalReferenceEncoded(rmode)); | 2789 DCHECK(RelocInfo::IsInternalReferenceEncoded(rmode)); |
| 2790 DCHECK(IsJ(instr) || IsLui(instr) || IsJal(instr)); |
| 2767 if (IsLui(instr)) { | 2791 if (IsLui(instr)) { |
| 2768 Instr instr_lui = instr_at(pc + 0 * Assembler::kInstrSize); | 2792 Instr instr_lui = instr_at(pc + 0 * Assembler::kInstrSize); |
| 2769 Instr instr_ori = instr_at(pc + 1 * Assembler::kInstrSize); | 2793 Instr instr_ori = instr_at(pc + 1 * Assembler::kInstrSize); |
| 2770 Instr instr_ori2 = instr_at(pc + 3 * Assembler::kInstrSize); | 2794 Instr instr_ori2 = instr_at(pc + 3 * Assembler::kInstrSize); |
| 2771 DCHECK(IsOri(instr_ori)); | 2795 DCHECK(IsOri(instr_ori)); |
| 2772 DCHECK(IsOri(instr_ori2)); | 2796 DCHECK(IsOri(instr_ori2)); |
| 2773 // TODO(plind): symbolic names for the shifts. | 2797 // TODO(plind): symbolic names for the shifts. |
| 2774 int64_t imm = (instr_lui & static_cast<int64_t>(kImm16Mask)) << 48; | 2798 int64_t imm = (instr_lui & static_cast<int64_t>(kImm16Mask)) << 48; |
| 2775 imm |= (instr_ori & static_cast<int64_t>(kImm16Mask)) << 32; | 2799 imm |= (instr_ori & static_cast<int64_t>(kImm16Mask)) << 32; |
| 2776 imm |= (instr_ori2 & static_cast<int64_t>(kImm16Mask)) << 16; | 2800 imm |= (instr_ori2 & static_cast<int64_t>(kImm16Mask)) << 16; |
| (...skipping 11 matching lines...) Expand all Loading... |
| 2788 instr_ori2 &= ~kImm16Mask; | 2812 instr_ori2 &= ~kImm16Mask; |
| 2789 | 2813 |
| 2790 instr_at_put(pc + 0 * Assembler::kInstrSize, | 2814 instr_at_put(pc + 0 * Assembler::kInstrSize, |
| 2791 instr_lui | ((imm >> 32) & kImm16Mask)); | 2815 instr_lui | ((imm >> 32) & kImm16Mask)); |
| 2792 instr_at_put(pc + 1 * Assembler::kInstrSize, | 2816 instr_at_put(pc + 1 * Assembler::kInstrSize, |
| 2793 instr_ori | (imm >> 16 & kImm16Mask)); | 2817 instr_ori | (imm >> 16 & kImm16Mask)); |
| 2794 instr_at_put(pc + 3 * Assembler::kInstrSize, | 2818 instr_at_put(pc + 3 * Assembler::kInstrSize, |
| 2795 instr_ori2 | (imm & kImm16Mask)); | 2819 instr_ori2 | (imm & kImm16Mask)); |
| 2796 return 4; // Number of instructions patched. | 2820 return 4; // Number of instructions patched. |
| 2797 } else { | 2821 } else { |
| 2798 UNREACHABLE(); | 2822 uint32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; |
| 2799 return 0; // Number of instructions patched. | 2823 if (static_cast<int32_t>(imm28) == kEndOfJumpChain) { |
| 2824 return 0; // Number of instructions patched. |
| 2825 } |
| 2826 |
| 2827 imm28 += pc_delta; |
| 2828 imm28 &= kImm28Mask; |
| 2829 DCHECK((imm28 & 3) == 0); |
| 2830 |
| 2831 instr &= ~kImm26Mask; |
| 2832 uint32_t imm26 = imm28 >> 2; |
| 2833 DCHECK(is_uint26(imm26)); |
| 2834 |
| 2835 instr_at_put(pc, instr | (imm26 & kImm26Mask)); |
| 2836 return 1; // Number of instructions patched. |
| 2800 } | 2837 } |
| 2801 } | 2838 } |
| 2802 | 2839 |
| 2803 | 2840 |
| 2804 void Assembler::GrowBuffer() { | 2841 void Assembler::GrowBuffer() { |
| 2805 if (!own_buffer_) FATAL("external code buffer is too small"); | 2842 if (!own_buffer_) FATAL("external code buffer is too small"); |
| 2806 | 2843 |
| 2807 // Compute new buffer size. | 2844 // Compute new buffer size. |
| 2808 CodeDesc desc; // The new buffer. | 2845 CodeDesc desc; // The new buffer. |
| 2809 if (buffer_size_ < 1 * MB) { | 2846 if (buffer_size_ < 1 * MB) { |
| (...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2951 | 2988 |
| 2952 int pool_start = pc_offset(); | 2989 int pool_start = pc_offset(); |
| 2953 for (int i = 0; i < unbound_labels_count_; i++) { | 2990 for (int i = 0; i < unbound_labels_count_; i++) { |
| 2954 uint64_t imm64; | 2991 uint64_t imm64; |
| 2955 imm64 = jump_address(&after_pool); | 2992 imm64 = jump_address(&after_pool); |
| 2956 { BlockGrowBufferScope block_buf_growth(this); | 2993 { BlockGrowBufferScope block_buf_growth(this); |
| 2957 // Buffer growth (and relocation) must be blocked for internal | 2994 // Buffer growth (and relocation) must be blocked for internal |
| 2958 // references until associated instructions are emitted and available | 2995 // references until associated instructions are emitted and available |
| 2959 // to be patched. | 2996 // to be patched. |
| 2960 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 2997 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
| 2961 // TODO(plind): Verify this, presume I cannot use macro-assembler | 2998 j(imm64); |
| 2962 // here. | |
| 2963 lui(at, (imm64 >> 32) & kImm16Mask); | |
| 2964 ori(at, at, (imm64 >> 16) & kImm16Mask); | |
| 2965 dsll(at, at, 16); | |
| 2966 ori(at, at, imm64 & kImm16Mask); | |
| 2967 } | 2999 } |
| 2968 jr(at); | |
| 2969 nop(); | 3000 nop(); |
| 2970 } | 3001 } |
| 2971 bind(&after_pool); | 3002 bind(&after_pool); |
| 2972 trampoline_ = Trampoline(pool_start, unbound_labels_count_); | 3003 trampoline_ = Trampoline(pool_start, unbound_labels_count_); |
| 2973 | 3004 |
| 2974 trampoline_emitted_ = true; | 3005 trampoline_emitted_ = true; |
| 2975 // As we are only going to emit trampoline once, we need to prevent any | 3006 // As we are only going to emit trampoline once, we need to prevent any |
| 2976 // further emission. | 3007 // further emission. |
| 2977 next_buffer_check_ = kMaxInt; | 3008 next_buffer_check_ = kMaxInt; |
| 2978 } | 3009 } |
| (...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3080 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { | 3111 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { |
| 3081 // No out-of-line constant pool support. | 3112 // No out-of-line constant pool support. |
| 3082 DCHECK(!FLAG_enable_ool_constant_pool); | 3113 DCHECK(!FLAG_enable_ool_constant_pool); |
| 3083 return; | 3114 return; |
| 3084 } | 3115 } |
| 3085 | 3116 |
| 3086 | 3117 |
| 3087 } } // namespace v8::internal | 3118 } } // namespace v8::internal |
| 3088 | 3119 |
| 3089 #endif // V8_TARGET_ARCH_MIPS64 | 3120 #endif // V8_TARGET_ARCH_MIPS64 |
| OLD | NEW |