OLD | NEW |
---|---|
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
6 // met: | 6 // met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 663 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
674 DCHECK(pos > delta); | 674 DCHECK(pos > delta); |
675 return pos - delta; | 675 return pos - delta; |
676 } | 676 } |
677 } else { | 677 } else { |
678 DCHECK(IsJ(instr) || IsJal(instr)); | 678 DCHECK(IsJ(instr) || IsJal(instr)); |
679 int32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; | 679 int32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; |
680 if (imm28 == kEndOfJumpChain) { | 680 if (imm28 == kEndOfJumpChain) { |
681 // EndOfChain sentinel is returned directly, not relative to pc or pos. | 681 // EndOfChain sentinel is returned directly, not relative to pc or pos. |
682 return kEndOfChain; | 682 return kEndOfChain; |
683 } else { | 683 } else { |
684 uint64_t instr_address = reinterpret_cast<int64_t>(buffer_ + pos); | 684 // Sign extend 28-bit offset. |
685 instr_address &= kImm28Mask; | 685 int32_t delta = static_cast<int32_t>((imm28 << 4) >> 4); |
686 int delta = static_cast<int>(instr_address - imm28); | 686 return pos + delta; |
687 DCHECK(pos > delta); | |
688 return pos - delta; | |
689 } | 687 } |
690 } | 688 } |
691 } | 689 } |
692 | 690 |
693 | 691 |
694 void Assembler::target_at_put(int pos, int target_pos, bool is_internal) { | 692 void Assembler::target_at_put(int pos, int target_pos, bool is_internal) { |
695 if (is_internal) { | 693 if (is_internal) { |
696 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; | 694 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
697 *reinterpret_cast<uint64_t*>(buffer_ + pos) = imm; | 695 *reinterpret_cast<uint64_t*>(buffer_ + pos) = imm; |
698 return; | 696 return; |
699 } | 697 } |
700 Instr instr = instr_at(pos); | 698 Instr instr = instr_at(pos); |
701 if ((instr & ~kImm16Mask) == 0) { | 699 if ((instr & ~kImm16Mask) == 0) { |
702 DCHECK(target_pos == kEndOfChain || target_pos >= 0); | 700 DCHECK(target_pos == kEndOfChain || target_pos >= 0); |
703 // Emitted label constant, not part of a branch. | 701 // Emitted label constant, not part of a branch. |
704 // Make label relative to Code* of generated Code object. | 702 // Make label relative to Code* of generated Code object. |
705 instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag)); | 703 instr_at_put(pos, target_pos + (Code::kHeaderSize - kHeapObjectTag)); |
706 return; | 704 return; |
707 } | 705 } |
708 | 706 |
709 DCHECK(IsBranch(instr) || IsJ(instr) || IsJal(instr) || IsLui(instr)); | |
710 if (IsBranch(instr)) { | 707 if (IsBranch(instr)) { |
711 int32_t imm18 = target_pos - (pos + kBranchPCOffset); | 708 int32_t imm18 = target_pos - (pos + kBranchPCOffset); |
712 DCHECK((imm18 & 3) == 0); | 709 DCHECK((imm18 & 3) == 0); |
713 | 710 |
714 instr &= ~kImm16Mask; | 711 instr &= ~kImm16Mask; |
715 int32_t imm16 = imm18 >> 2; | 712 int32_t imm16 = imm18 >> 2; |
716 DCHECK(is_int16(imm16)); | 713 DCHECK(is_int16(imm16)); |
717 | 714 |
718 instr_at_put(pos, instr | (imm16 & kImm16Mask)); | 715 instr_at_put(pos, instr | (imm16 & kImm16Mask)); |
719 } else if (IsLui(instr)) { | 716 } else if (IsLui(instr)) { |
720 Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize); | 717 Instr instr_lui = instr_at(pos + 0 * Assembler::kInstrSize); |
721 Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize); | 718 Instr instr_ori = instr_at(pos + 1 * Assembler::kInstrSize); |
722 Instr instr_ori2 = instr_at(pos + 3 * Assembler::kInstrSize); | 719 Instr instr_ori2 = instr_at(pos + 3 * Assembler::kInstrSize); |
723 DCHECK(IsOri(instr_ori)); | 720 DCHECK(IsOri(instr_ori)); |
724 DCHECK(IsOri(instr_ori2)); | 721 DCHECK(IsOri(instr_ori2)); |
725 | 722 |
726 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; | 723 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
727 DCHECK((imm & 3) == 0); | 724 DCHECK((imm & 3) == 0); |
728 | 725 |
729 instr_lui &= ~kImm16Mask; | 726 instr_lui &= ~kImm16Mask; |
730 instr_ori &= ~kImm16Mask; | 727 instr_ori &= ~kImm16Mask; |
731 instr_ori2 &= ~kImm16Mask; | 728 instr_ori2 &= ~kImm16Mask; |
732 | 729 |
733 instr_at_put(pos + 0 * Assembler::kInstrSize, | 730 instr_at_put(pos + 0 * Assembler::kInstrSize, |
734 instr_lui | ((imm >> 32) & kImm16Mask)); | 731 instr_lui | ((imm >> 32) & kImm16Mask)); |
735 instr_at_put(pos + 1 * Assembler::kInstrSize, | 732 instr_at_put(pos + 1 * Assembler::kInstrSize, |
736 instr_ori | ((imm >> 16) & kImm16Mask)); | 733 instr_ori | ((imm >> 16) & kImm16Mask)); |
737 instr_at_put(pos + 3 * Assembler::kInstrSize, | 734 instr_at_put(pos + 3 * Assembler::kInstrSize, |
738 instr_ori2 | (imm & kImm16Mask)); | 735 instr_ori2 | (imm & kImm16Mask)); |
739 } else { | 736 } else if (IsJ(instr) || IsJal(instr)) { |
740 DCHECK(IsJ(instr) || IsJal(instr)); | 737 int32_t imm28 = target_pos - pos; |
741 uint64_t imm28 = reinterpret_cast<uint64_t>(buffer_) + target_pos; | |
742 imm28 &= kImm28Mask; | |
743 DCHECK((imm28 & 3) == 0); | 738 DCHECK((imm28 & 3) == 0); |
744 | 739 |
745 instr &= ~kImm26Mask; | |
746 uint32_t imm26 = static_cast<uint32_t>(imm28 >> 2); | 740 uint32_t imm26 = static_cast<uint32_t>(imm28 >> 2); |
747 DCHECK(is_uint26(imm26)); | 741 DCHECK(is_uint26(imm26)); |
742 // Place 26-bit signed offset with markings. | |
743 // When code is committed it will be resolved to j/jal. | |
744 int32_t mark = IsJ(instr) ? kJRawMark : kJalRawMark; | |
745 instr_at_put(pos, mark | (imm26 & kImm26Mask)); | |
746 } else { | |
747 int32_t imm28 = target_pos - pos; | |
748 DCHECK((imm28 & 3) == 0); | |
748 | 749 |
750 uint32_t imm26 = static_cast<uint32_t>(imm28 >> 2); | |
751 DCHECK(is_uint26(imm26)); | |
752 // Place raw 26-bit signed offset. | |
753 // When code is committed it will be resolved to j/jal. | |
754 instr &= ~kImm26Mask; | |
749 instr_at_put(pos, instr | (imm26 & kImm26Mask)); | 755 instr_at_put(pos, instr | (imm26 & kImm26Mask)); |
750 } | 756 } |
751 } | 757 } |
752 | 758 |
753 | 759 |
754 void Assembler::print(Label* L) { | 760 void Assembler::print(Label* L) { |
755 if (L->is_unused()) { | 761 if (L->is_unused()) { |
756 PrintF("unused label\n"); | 762 PrintF("unused label\n"); |
757 } else if (L->is_bound()) { | 763 } else if (L->is_bound()) { |
758 PrintF("bound label to %d\n", L->pos()); | 764 PrintF("bound label to %d\n", L->pos()); |
(...skipping 261 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1020 return kEndOfJumpChain; | 1026 return kEndOfJumpChain; |
1021 } | 1027 } |
1022 } | 1028 } |
1023 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; | 1029 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
1024 DCHECK((imm & 3) == 0); | 1030 DCHECK((imm & 3) == 0); |
1025 | 1031 |
1026 return imm; | 1032 return imm; |
1027 } | 1033 } |
1028 | 1034 |
1029 | 1035 |
1036 uint64_t Assembler::jump_offset(Label* L) { | |
1037 int64_t target_pos; | |
1038 if (L->is_bound()) { | |
1039 target_pos = L->pos(); | |
1040 } else { | |
1041 if (L->is_linked()) { | |
1042 target_pos = L->pos(); // L's link. | |
1043 L->link_to(pc_offset()); | |
1044 } else { | |
1045 L->link_to(pc_offset()); | |
1046 return kEndOfJumpChain; | |
1047 } | |
1048 } | |
1049 int64_t imm = target_pos - pc_offset(); | |
1050 DCHECK((imm & 3) == 0); | |
1051 | |
1052 return static_cast<uint64_t>(imm); | |
1053 } | |
1054 | |
1055 | |
1030 int32_t Assembler::branch_offset(Label* L, bool jump_elimination_allowed) { | 1056 int32_t Assembler::branch_offset(Label* L, bool jump_elimination_allowed) { |
1031 int32_t target_pos; | 1057 int32_t target_pos; |
1032 if (L->is_bound()) { | 1058 if (L->is_bound()) { |
1033 target_pos = L->pos(); | 1059 target_pos = L->pos(); |
1034 } else { | 1060 } else { |
1035 if (L->is_linked()) { | 1061 if (L->is_linked()) { |
1036 target_pos = L->pos(); | 1062 target_pos = L->pos(); |
1037 L->link_to(pc_offset()); | 1063 L->link_to(pc_offset()); |
1038 } else { | 1064 } else { |
1039 L->link_to(pc_offset()); | 1065 L->link_to(pc_offset()); |
(...skipping 358 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1398 | 1424 |
1399 void Assembler::bnezc(Register rs, int32_t offset) { | 1425 void Assembler::bnezc(Register rs, int32_t offset) { |
1400 DCHECK(kArchVariant == kMips64r6); | 1426 DCHECK(kArchVariant == kMips64r6); |
1401 DCHECK(!(rs.is(zero_reg))); | 1427 DCHECK(!(rs.is(zero_reg))); |
1402 Instr instr = POP76 | (rs.code() << kRsShift) | offset; | 1428 Instr instr = POP76 | (rs.code() << kRsShift) | offset; |
1403 emit(instr); | 1429 emit(instr); |
1404 } | 1430 } |
1405 | 1431 |
1406 | 1432 |
1407 void Assembler::j(int64_t target) { | 1433 void Assembler::j(int64_t target) { |
1408 #if DEBUG | 1434 GenInstrJump(J, static_cast<uint32_t>(target >> 2) & kImm26Mask); |
1409 // Get pc of delay slot. | 1435 } |
1410 if (target != kEndOfJumpChain) { | 1436 |
1411 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); | 1437 |
1412 bool in_range = ((ipc ^ static_cast<uint64_t>(target)) >> | 1438 void Assembler::j(Label* target) { |
1413 (kImm26Bits + kImmFieldShift)) == 0; | 1439 uint64_t imm = jump_offset(target); |
1414 DCHECK(in_range && ((target & 3) == 0)); | 1440 if (target->is_bound()) { |
1441 GenInstrJump(static_cast<Opcode>(kJRawMark), | |
1442 static_cast<uint32_t>(imm >> 2) & kImm26Mask); | |
1443 } else { | |
1444 j(imm); | |
1415 } | 1445 } |
1416 #endif | 1446 } |
1417 GenInstrJump(J, static_cast<uint32_t>(target >> 2) & kImm26Mask); | 1447 |
1448 | |
1449 void Assembler::jal(Label* target) { | |
1450 uint64_t imm = jump_offset(target); | |
1451 if (target->is_bound()) { | |
1452 GenInstrJump(static_cast<Opcode>(kJalRawMark), | |
1453 static_cast<uint32_t>(imm >> 2) & kImm26Mask); | |
1454 } else { | |
 1455 jal(imm); |
paul.l...
2015/08/01 17:02:22
typo: this should be jal()
| |
1456 } | |
1418 } | 1457 } |
1419 | 1458 |
1420 | 1459 |
1421 void Assembler::jr(Register rs) { | 1460 void Assembler::jr(Register rs) { |
1422 if (kArchVariant != kMips64r6) { | 1461 if (kArchVariant != kMips64r6) { |
1423 BlockTrampolinePoolScope block_trampoline_pool(this); | 1462 BlockTrampolinePoolScope block_trampoline_pool(this); |
1424 if (rs.is(ra)) { | 1463 if (rs.is(ra)) { |
1425 positions_recorder()->WriteRecordedPositions(); | 1464 positions_recorder()->WriteRecordedPositions(); |
1426 } | 1465 } |
1427 GenInstrRegister(SPECIAL, rs, zero_reg, zero_reg, 0, JR); | 1466 GenInstrRegister(SPECIAL, rs, zero_reg, zero_reg, 0, JR); |
1428 BlockTrampolinePoolFor(1); // For associated delay slot. | 1467 BlockTrampolinePoolFor(1); // For associated delay slot. |
1429 } else { | 1468 } else { |
1430 jalr(rs, zero_reg); | 1469 jalr(rs, zero_reg); |
1431 } | 1470 } |
1432 } | 1471 } |
1433 | 1472 |
1434 | 1473 |
1435 void Assembler::jal(int64_t target) { | 1474 void Assembler::jal(int64_t target) { |
1436 #ifdef DEBUG | |
1437 // Get pc of delay slot. | |
1438 if (target != kEndOfJumpChain) { | |
1439 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); | |
1440 bool in_range = ((ipc ^ static_cast<uint64_t>(target)) >> | |
1441 (kImm26Bits + kImmFieldShift)) == 0; | |
1442 DCHECK(in_range && ((target & 3) == 0)); | |
1443 } | |
1444 #endif | |
1445 positions_recorder()->WriteRecordedPositions(); | 1475 positions_recorder()->WriteRecordedPositions(); |
1446 GenInstrJump(JAL, static_cast<uint32_t>(target >> 2) & kImm26Mask); | 1476 GenInstrJump(JAL, static_cast<uint32_t>(target >> 2) & kImm26Mask); |
1447 } | 1477 } |
1448 | 1478 |
1449 | 1479 |
1450 void Assembler::jalr(Register rs, Register rd) { | 1480 void Assembler::jalr(Register rs, Register rd) { |
1451 DCHECK(rs.code() != rd.code()); | 1481 DCHECK(rs.code() != rd.code()); |
1452 BlockTrampolinePoolScope block_trampoline_pool(this); | 1482 BlockTrampolinePoolScope block_trampoline_pool(this); |
1453 positions_recorder()->WriteRecordedPositions(); | 1483 positions_recorder()->WriteRecordedPositions(); |
1454 GenInstrRegister(SPECIAL, rs, zero_reg, rd, 0, JALR); | 1484 GenInstrRegister(SPECIAL, rs, zero_reg, rd, 0, JALR); |
(...skipping 1458 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2913 if (RelocInfo::IsInternalReference(rmode)) { | 2943 if (RelocInfo::IsInternalReference(rmode)) { |
2914 int64_t* p = reinterpret_cast<int64_t*>(pc); | 2944 int64_t* p = reinterpret_cast<int64_t*>(pc); |
2915 if (*p == kEndOfJumpChain) { | 2945 if (*p == kEndOfJumpChain) { |
2916 return 0; // Number of instructions patched. | 2946 return 0; // Number of instructions patched. |
2917 } | 2947 } |
2918 *p += pc_delta; | 2948 *p += pc_delta; |
2919 return 2; // Number of instructions patched. | 2949 return 2; // Number of instructions patched. |
2920 } | 2950 } |
2921 Instr instr = instr_at(pc); | 2951 Instr instr = instr_at(pc); |
2922 DCHECK(RelocInfo::IsInternalReferenceEncoded(rmode)); | 2952 DCHECK(RelocInfo::IsInternalReferenceEncoded(rmode)); |
2923 DCHECK(IsJ(instr) || IsLui(instr) || IsJal(instr)); | |
2924 if (IsLui(instr)) { | 2953 if (IsLui(instr)) { |
2925 Instr instr_lui = instr_at(pc + 0 * Assembler::kInstrSize); | 2954 Instr instr_lui = instr_at(pc + 0 * Assembler::kInstrSize); |
2926 Instr instr_ori = instr_at(pc + 1 * Assembler::kInstrSize); | 2955 Instr instr_ori = instr_at(pc + 1 * Assembler::kInstrSize); |
2927 Instr instr_ori2 = instr_at(pc + 3 * Assembler::kInstrSize); | 2956 Instr instr_ori2 = instr_at(pc + 3 * Assembler::kInstrSize); |
2928 DCHECK(IsOri(instr_ori)); | 2957 DCHECK(IsOri(instr_ori)); |
2929 DCHECK(IsOri(instr_ori2)); | 2958 DCHECK(IsOri(instr_ori2)); |
2930 // TODO(plind): symbolic names for the shifts. | 2959 // TODO(plind): symbolic names for the shifts. |
2931 int64_t imm = (instr_lui & static_cast<int64_t>(kImm16Mask)) << 48; | 2960 int64_t imm = (instr_lui & static_cast<int64_t>(kImm16Mask)) << 48; |
2932 imm |= (instr_ori & static_cast<int64_t>(kImm16Mask)) << 32; | 2961 imm |= (instr_ori & static_cast<int64_t>(kImm16Mask)) << 32; |
2933 imm |= (instr_ori2 & static_cast<int64_t>(kImm16Mask)) << 16; | 2962 imm |= (instr_ori2 & static_cast<int64_t>(kImm16Mask)) << 16; |
(...skipping 10 matching lines...) Expand all Loading... | |
2944 instr_ori &= ~kImm16Mask; | 2973 instr_ori &= ~kImm16Mask; |
2945 instr_ori2 &= ~kImm16Mask; | 2974 instr_ori2 &= ~kImm16Mask; |
2946 | 2975 |
2947 instr_at_put(pc + 0 * Assembler::kInstrSize, | 2976 instr_at_put(pc + 0 * Assembler::kInstrSize, |
2948 instr_lui | ((imm >> 32) & kImm16Mask)); | 2977 instr_lui | ((imm >> 32) & kImm16Mask)); |
2949 instr_at_put(pc + 1 * Assembler::kInstrSize, | 2978 instr_at_put(pc + 1 * Assembler::kInstrSize, |
2950 instr_ori | (imm >> 16 & kImm16Mask)); | 2979 instr_ori | (imm >> 16 & kImm16Mask)); |
2951 instr_at_put(pc + 3 * Assembler::kInstrSize, | 2980 instr_at_put(pc + 3 * Assembler::kInstrSize, |
2952 instr_ori2 | (imm & kImm16Mask)); | 2981 instr_ori2 | (imm & kImm16Mask)); |
2953 return 4; // Number of instructions patched. | 2982 return 4; // Number of instructions patched. |
2954 } else { | 2983 } else if (IsJ(instr) || IsJal(instr)) { |
2984 // Regular j/jal relocation. | |
2955 uint32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; | 2985 uint32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; |
2956 if (static_cast<int32_t>(imm28) == kEndOfJumpChain) { | |
2957 return 0; // Number of instructions patched. | |
2958 } | |
2959 | |
2960 imm28 += pc_delta; | 2986 imm28 += pc_delta; |
2961 imm28 &= kImm28Mask; | 2987 imm28 &= kImm28Mask; |
2988 instr &= ~kImm26Mask; | |
2962 DCHECK((imm28 & 3) == 0); | 2989 DCHECK((imm28 & 3) == 0); |
2963 | 2990 uint32_t imm26 = static_cast<uint32_t>(imm28 >> 2); |
2964 instr &= ~kImm26Mask; | |
2965 uint32_t imm26 = imm28 >> 2; | |
2966 DCHECK(is_uint26(imm26)); | |
2967 | |
2968 instr_at_put(pc, instr | (imm26 & kImm26Mask)); | 2991 instr_at_put(pc, instr | (imm26 & kImm26Mask)); |
2969 return 1; // Number of instructions patched. | 2992 return 1; // Number of instructions patched. |
2993 } else { | |
2994 // Unbox raw offset and emit j/jal. | |
2995 int32_t imm28 = (instr & static_cast<int32_t>(kImm26Mask)) << 2; | |
2996 // Sign extend 28-bit offset to 32-bit. | |
2997 imm28 = (imm28 << 4) >> 4; | |
2998 uint64_t target = | |
2999 static_cast<int64_t>(imm28) + reinterpret_cast<uint64_t>(pc); | |
3000 target &= kImm28Mask; | |
3001 DCHECK((imm28 & 3) == 0); | |
3002 uint32_t imm26 = static_cast<uint32_t>(target >> 2); | |
3003 // Check markings whether to emit j or jal. | |
3004 uint32_t unbox = (instr & kJRawMark) ? J : JAL; | |
3005 instr_at_put(pc, unbox | (imm26 & kImm26Mask)); | |
3006 return 1; // Number of instructions patched. | |
2970 } | 3007 } |
2971 } | 3008 } |
2972 | 3009 |
2973 | 3010 |
2974 void Assembler::GrowBuffer() { | 3011 void Assembler::GrowBuffer() { |
2975 if (!own_buffer_) FATAL("external code buffer is too small"); | 3012 if (!own_buffer_) FATAL("external code buffer is too small"); |
2976 | 3013 |
2977 // Compute new buffer size. | 3014 // Compute new buffer size. |
2978 CodeDesc desc; // The new buffer. | 3015 CodeDesc desc; // The new buffer. |
2979 if (buffer_size_ < 1 * MB) { | 3016 if (buffer_size_ < 1 * MB) { |
(...skipping 22 matching lines...) Expand all Loading... | |
3002 DeleteArray(buffer_); | 3039 DeleteArray(buffer_); |
3003 buffer_ = desc.buffer; | 3040 buffer_ = desc.buffer; |
3004 buffer_size_ = desc.buffer_size; | 3041 buffer_size_ = desc.buffer_size; |
3005 pc_ += pc_delta; | 3042 pc_ += pc_delta; |
3006 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, | 3043 reloc_info_writer.Reposition(reloc_info_writer.pos() + rc_delta, |
3007 reloc_info_writer.last_pc() + pc_delta); | 3044 reloc_info_writer.last_pc() + pc_delta); |
3008 | 3045 |
3009 // Relocate runtime entries. | 3046 // Relocate runtime entries. |
3010 for (RelocIterator it(desc); !it.done(); it.next()) { | 3047 for (RelocIterator it(desc); !it.done(); it.next()) { |
3011 RelocInfo::Mode rmode = it.rinfo()->rmode(); | 3048 RelocInfo::Mode rmode = it.rinfo()->rmode(); |
3012 if (rmode == RelocInfo::INTERNAL_REFERENCE_ENCODED || | 3049 if (rmode == RelocInfo::INTERNAL_REFERENCE) { |
3013 rmode == RelocInfo::INTERNAL_REFERENCE) { | |
3014 byte* p = reinterpret_cast<byte*>(it.rinfo()->pc()); | 3050 byte* p = reinterpret_cast<byte*>(it.rinfo()->pc()); |
3015 RelocateInternalReference(rmode, p, pc_delta); | 3051 RelocateInternalReference(rmode, p, pc_delta); |
3016 } | 3052 } |
3017 } | 3053 } |
3018 DCHECK(!overflow()); | 3054 DCHECK(!overflow()); |
3019 } | 3055 } |
3020 | 3056 |
3021 | 3057 |
3022 void Assembler::db(uint8_t data) { | 3058 void Assembler::db(uint8_t data) { |
3023 CheckBuffer(); | 3059 CheckBuffer(); |
(...skipping 99 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3123 DCHECK(unbound_labels_count_ >= 0); | 3159 DCHECK(unbound_labels_count_ >= 0); |
3124 if (unbound_labels_count_ > 0) { | 3160 if (unbound_labels_count_ > 0) { |
3125 // First we emit jump (2 instructions), then we emit trampoline pool. | 3161 // First we emit jump (2 instructions), then we emit trampoline pool. |
3126 { BlockTrampolinePoolScope block_trampoline_pool(this); | 3162 { BlockTrampolinePoolScope block_trampoline_pool(this); |
3127 Label after_pool; | 3163 Label after_pool; |
3128 b(&after_pool); | 3164 b(&after_pool); |
3129 nop(); | 3165 nop(); |
3130 | 3166 |
3131 int pool_start = pc_offset(); | 3167 int pool_start = pc_offset(); |
3132 for (int i = 0; i < unbound_labels_count_; i++) { | 3168 for (int i = 0; i < unbound_labels_count_; i++) { |
3133 uint64_t imm64; | |
3134 imm64 = jump_address(&after_pool); | |
3135 { BlockGrowBufferScope block_buf_growth(this); | 3169 { BlockGrowBufferScope block_buf_growth(this); |
3136 // Buffer growth (and relocation) must be blocked for internal | 3170 // Buffer growth (and relocation) must be blocked for internal |
3137 // references until associated instructions are emitted and available | 3171 // references until associated instructions are emitted and available |
3138 // to be patched. | 3172 // to be patched. |
3139 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 3173 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
3140 j(imm64); | 3174 j(&after_pool); |
3141 } | 3175 } |
3142 nop(); | 3176 nop(); |
3143 } | 3177 } |
3144 bind(&after_pool); | 3178 bind(&after_pool); |
3145 trampoline_ = Trampoline(pool_start, unbound_labels_count_); | 3179 trampoline_ = Trampoline(pool_start, unbound_labels_count_); |
3146 | 3180 |
3147 trampoline_emitted_ = true; | 3181 trampoline_emitted_ = true; |
3148 // As we are only going to emit trampoline once, we need to prevent any | 3182 // As we are only going to emit trampoline once, we need to prevent any |
3149 // further emission. | 3183 // further emission. |
3150 next_buffer_check_ = kMaxInt; | 3184 next_buffer_check_ = kMaxInt; |
(...skipping 89 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3240 if (icache_flush_mode != SKIP_ICACHE_FLUSH) { | 3274 if (icache_flush_mode != SKIP_ICACHE_FLUSH) { |
3241 CpuFeatures::FlushICache(pc, 4 * Assembler::kInstrSize); | 3275 CpuFeatures::FlushICache(pc, 4 * Assembler::kInstrSize); |
3242 } | 3276 } |
3243 } | 3277 } |
3244 | 3278 |
3245 | 3279 |
3246 } // namespace internal | 3280 } // namespace internal |
3247 } // namespace v8 | 3281 } // namespace v8 |
3248 | 3282 |
3249 #endif // V8_TARGET_ARCH_MIPS64 | 3283 #endif // V8_TARGET_ARCH_MIPS64 |
OLD | NEW |