| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions are | 5 // modification, are permitted provided that the following conditions are |
| 6 // met: | 6 // met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 467 matching lines...) |
| 478 return opcode == BEQ || | 478 return opcode == BEQ || |
| 479 opcode == BNE || | 479 opcode == BNE || |
| 480 opcode == BLEZ || | 480 opcode == BLEZ || |
| 481 opcode == BGTZ || | 481 opcode == BGTZ || |
| 482 opcode == BEQL || | 482 opcode == BEQL || |
| 483 opcode == BNEL || | 483 opcode == BNEL || |
| 484 opcode == BLEZL || | 484 opcode == BLEZL || |
| 485 opcode == BGTZL || | 485 opcode == BGTZL || |
| 486 (opcode == REGIMM && (rt_field == BLTZ || rt_field == BGEZ || | 486 (opcode == REGIMM && (rt_field == BLTZ || rt_field == BGEZ || |
| 487 rt_field == BLTZAL || rt_field == BGEZAL)) || | 487 rt_field == BLTZAL || rt_field == BGEZAL)) || |
| 488 (opcode == COP1 && rs_field == BC1); // Coprocessor branch. | 488 (opcode == COP1 && rs_field == BC1) || // Coprocessor branch. |
| 489 (opcode == COP1 && rs_field == BC1EQZ) || |
| 490 (opcode == COP1 && rs_field == BC1NEZ); |
| 489 } | 491 } |
| 490 | 492 |
| 491 | 493 |
| 492 bool Assembler::IsEmittedConstant(Instr instr) { | 494 bool Assembler::IsEmittedConstant(Instr instr) { |
| 493 uint32_t label_constant = GetLabelConst(instr); | 495 uint32_t label_constant = GetLabelConst(instr); |
| 494 return label_constant == 0; // Emitted label const in reg-exp engine. | 496 return label_constant == 0; // Emitted label const in reg-exp engine. |
| 495 } | 497 } |
| 496 | 498 |
| 497 | 499 |
| 498 bool Assembler::IsBeq(Instr instr) { | 500 bool Assembler::IsBeq(Instr instr) { |
| (...skipping 463 matching lines...) |
| 962 ASSERT(is_uint26(address)); | 964 ASSERT(is_uint26(address)); |
| 963 Instr instr = opcode | address; | 965 Instr instr = opcode | address; |
| 964 emit(instr); | 966 emit(instr); |
| 965 BlockTrampolinePoolFor(1); // For associated delay slot. | 967 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 966 } | 968 } |
| 967 | 969 |
| 968 | 970 |
| 969 // Returns the next free trampoline entry. | 971 // Returns the next free trampoline entry. |
| 970 int32_t Assembler::get_trampoline_entry(int32_t pos) { | 972 int32_t Assembler::get_trampoline_entry(int32_t pos) { |
| 971 int32_t trampoline_entry = kInvalidSlotPos; | 973 int32_t trampoline_entry = kInvalidSlotPos; |
| 972 | |
| 973 if (!internal_trampoline_exception_) { | 974 if (!internal_trampoline_exception_) { |
| 974 if (trampoline_.start() > pos) { | 975 if (trampoline_.start() > pos) { |
| 975 trampoline_entry = trampoline_.take_slot(); | 976 trampoline_entry = trampoline_.take_slot(); |
| 976 } | 977 } |
| 977 | 978 |
| 978 if (kInvalidSlotPos == trampoline_entry) { | 979 if (kInvalidSlotPos == trampoline_entry) { |
| 979 internal_trampoline_exception_ = true; | 980 internal_trampoline_exception_ = true; |
| 980 } | 981 } |
| 981 } | 982 } |
| 982 return trampoline_entry; | 983 return trampoline_entry; |
| 983 } | 984 } |
| 984 | 985 |
| 985 | 986 |
| 986 uint64_t Assembler::jump_address(Label* L) { | 987 uint64_t Assembler::jump_address(Label* L) { |
| 987 int64_t target_pos; | 988 int64_t target_pos; |
| 988 | |
| 989 if (L->is_bound()) { | 989 if (L->is_bound()) { |
| 990 target_pos = L->pos(); | 990 target_pos = L->pos(); |
| 991 } else { | 991 } else { |
| 992 if (L->is_linked()) { | 992 if (L->is_linked()) { |
| 993 target_pos = L->pos(); // L's link. | 993 target_pos = L->pos(); // L's link. |
| 994 L->link_to(pc_offset()); | 994 L->link_to(pc_offset()); |
| 995 } else { | 995 } else { |
| 996 L->link_to(pc_offset()); | 996 L->link_to(pc_offset()); |
| 997 return kEndOfJumpChain; | 997 return kEndOfJumpChain; |
| 998 } | 998 } |
| 999 } | 999 } |
| 1000 | 1000 |
| 1001 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; | 1001 uint64_t imm = reinterpret_cast<uint64_t>(buffer_) + target_pos; |
| 1002 ASSERT((imm & 3) == 0); | 1002 ASSERT((imm & 3) == 0); |
| 1003 | 1003 |
| 1004 return imm; | 1004 return imm; |
| 1005 } | 1005 } |
| 1006 | 1006 |
| 1007 | 1007 |
| 1008 int32_t Assembler::branch_offset(Label* L, bool jump_elimination_allowed) { | 1008 int32_t Assembler::branch_offset(Label* L, bool jump_elimination_allowed) { |
| 1009 int32_t target_pos; | 1009 int32_t target_pos; |
| 1010 | |
| 1011 if (L->is_bound()) { | 1010 if (L->is_bound()) { |
| 1012 target_pos = L->pos(); | 1011 target_pos = L->pos(); |
| 1013 } else { | 1012 } else { |
| 1014 if (L->is_linked()) { | 1013 if (L->is_linked()) { |
| 1015 target_pos = L->pos(); | 1014 target_pos = L->pos(); |
| 1016 L->link_to(pc_offset()); | 1015 L->link_to(pc_offset()); |
| 1017 } else { | 1016 } else { |
| 1018 L->link_to(pc_offset()); | 1017 L->link_to(pc_offset()); |
| 1019 if (!trampoline_emitted_) { | 1018 if (!trampoline_emitted_) { |
| 1020 unbound_labels_count_++; | 1019 unbound_labels_count_++; |
| 1021 next_buffer_check_ -= kTrampolineSlotsSize; | 1020 next_buffer_check_ -= kTrampolineSlotsSize; |
| 1022 } | 1021 } |
| 1023 return kEndOfChain; | 1022 return kEndOfChain; |
| 1024 } | 1023 } |
| 1025 } | 1024 } |
| 1026 | 1025 |
| 1027 int32_t offset = target_pos - (pc_offset() + kBranchPCOffset); | 1026 int32_t offset = target_pos - (pc_offset() + kBranchPCOffset); |
| 1028 ASSERT((offset & 3) == 0); | 1027 ASSERT((offset & 3) == 0); |
| 1029 ASSERT(is_int16(offset >> 2)); | 1028 ASSERT(is_int16(offset >> 2)); |
| 1030 | 1029 |
| 1031 return offset; | 1030 return offset; |
| 1032 } | 1031 } |
| 1033 | 1032 |
| 1034 | 1033 |
| 1034 int32_t Assembler::branch_offset_compact(Label* L, |
| 1035 bool jump_elimination_allowed) { |
| 1036 int32_t target_pos; |
| 1037 if (L->is_bound()) { |
| 1038 target_pos = L->pos(); |
| 1039 } else { |
| 1040 if (L->is_linked()) { |
| 1041 target_pos = L->pos(); |
| 1042 L->link_to(pc_offset()); |
| 1043 } else { |
| 1044 L->link_to(pc_offset()); |
| 1045 if (!trampoline_emitted_) { |
| 1046 unbound_labels_count_++; |
| 1047 next_buffer_check_ -= kTrampolineSlotsSize; |
| 1048 } |
| 1049 return kEndOfChain; |
| 1050 } |
| 1051 } |
| 1052 |
| 1053 int32_t offset = target_pos - pc_offset(); |
| 1054 ASSERT((offset & 3) == 0); |
| 1055 ASSERT(is_int16(offset >> 2)); |
| 1056 |
| 1057 return offset; |
| 1058 } |
| 1059 |
| 1060 |
| 1061 int32_t Assembler::branch_offset21(Label* L, bool jump_elimination_allowed) { |
| 1062 int32_t target_pos; |
| 1063 if (L->is_bound()) { |
| 1064 target_pos = L->pos(); |
| 1065 } else { |
| 1066 if (L->is_linked()) { |
| 1067 target_pos = L->pos(); |
| 1068 L->link_to(pc_offset()); |
| 1069 } else { |
| 1070 L->link_to(pc_offset()); |
| 1071 if (!trampoline_emitted_) { |
| 1072 unbound_labels_count_++; |
| 1073 next_buffer_check_ -= kTrampolineSlotsSize; |
| 1074 } |
| 1075 return kEndOfChain; |
| 1076 } |
| 1077 } |
| 1078 |
| 1079 int32_t offset = target_pos - (pc_offset() + kBranchPCOffset); |
| 1080 ASSERT((offset & 3) == 0); |
| 1081 ASSERT(((offset >> 2) & 0xFFE00000) == 0); // Offset is 21 bits wide. |
| 1082 |
| 1083 return offset; |
| 1084 } |
| 1085 |
| 1086 |
| 1087 int32_t Assembler::branch_offset21_compact(Label* L, |
| 1088 bool jump_elimination_allowed) { |
| 1089 int32_t target_pos; |
| 1090 if (L->is_bound()) { |
| 1091 target_pos = L->pos(); |
| 1092 } else { |
| 1093 if (L->is_linked()) { |
| 1094 target_pos = L->pos(); |
| 1095 L->link_to(pc_offset()); |
| 1096 } else { |
| 1097 L->link_to(pc_offset()); |
| 1098 if (!trampoline_emitted_) { |
| 1099 unbound_labels_count_++; |
| 1100 next_buffer_check_ -= kTrampolineSlotsSize; |
| 1101 } |
| 1102 return kEndOfChain; |
| 1103 } |
| 1104 } |
| 1105 |
| 1106 int32_t offset = target_pos - pc_offset(); |
| 1107 ASSERT((offset & 3) == 0); |
| 1108 ASSERT(((offset >> 2) & 0xFFE00000) == 0); // Offset is 21 bits wide. |
| 1109 |
| 1110 return offset; |
| 1111 } |
| 1112 |
| 1113 |
| 1035 void Assembler::label_at_put(Label* L, int at_offset) { | 1114 void Assembler::label_at_put(Label* L, int at_offset) { |
| 1036 int target_pos; | 1115 int target_pos; |
| 1037 if (L->is_bound()) { | 1116 if (L->is_bound()) { |
| 1038 target_pos = L->pos(); | 1117 target_pos = L->pos(); |
| 1039 instr_at_put(at_offset, target_pos + (Code::kHeaderSize - kHeapObjectTag)); | 1118 instr_at_put(at_offset, target_pos + (Code::kHeaderSize - kHeapObjectTag)); |
| 1040 } else { | 1119 } else { |
| 1041 if (L->is_linked()) { | 1120 if (L->is_linked()) { |
| 1042 target_pos = L->pos(); // L's link. | 1121 target_pos = L->pos(); // L's link. |
| 1043 int32_t imm18 = target_pos - at_offset; | 1122 int32_t imm18 = target_pos - at_offset; |
| 1044 ASSERT((imm18 & 3) == 0); | 1123 ASSERT((imm18 & 3) == 0); |
| (...skipping 33 matching lines...) |
| 1078 } | 1157 } |
| 1079 | 1158 |
| 1080 | 1159 |
| 1081 void Assembler::bgez(Register rs, int16_t offset) { | 1160 void Assembler::bgez(Register rs, int16_t offset) { |
| 1082 BlockTrampolinePoolScope block_trampoline_pool(this); | 1161 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1083 GenInstrImmediate(REGIMM, rs, BGEZ, offset); | 1162 GenInstrImmediate(REGIMM, rs, BGEZ, offset); |
| 1084 BlockTrampolinePoolFor(1); // For associated delay slot. | 1163 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1085 } | 1164 } |
| 1086 | 1165 |
| 1087 | 1166 |
| 1167 void Assembler::bgezc(Register rt, int16_t offset) { |
| 1168 ASSERT(kArchVariant == kMips64r6); |
| 1169 ASSERT(!(rt.is(zero_reg))); |
| 1170 GenInstrImmediate(BLEZL, rt, rt, offset); |
| 1171 } |
| 1172 |
| 1173 |
| 1174 void Assembler::bgeuc(Register rs, Register rt, int16_t offset) { |
| 1175 ASSERT(kArchVariant == kMips64r6); |
| 1176 ASSERT(!(rs.is(zero_reg))); |
| 1177 ASSERT(!(rt.is(zero_reg))); |
| 1178 ASSERT(rs.code() != rt.code()); |
| 1179 GenInstrImmediate(BLEZ, rs, rt, offset); |
| 1180 } |
| 1181 |
| 1182 |
| 1183 void Assembler::bgec(Register rs, Register rt, int16_t offset) { |
| 1184 ASSERT(kArchVariant == kMips64r6); |
| 1185 ASSERT(!(rs.is(zero_reg))); |
| 1186 ASSERT(!(rt.is(zero_reg))); |
| 1187 ASSERT(rs.code() != rt.code()); |
| 1188 GenInstrImmediate(BLEZL, rs, rt, offset); |
| 1189 } |
| 1190 |
| 1191 |
| 1088 void Assembler::bgezal(Register rs, int16_t offset) { | 1192 void Assembler::bgezal(Register rs, int16_t offset) { |
| 1193 ASSERT(kArchVariant != kMips64r6 || rs.is(zero_reg)); |
| 1089 BlockTrampolinePoolScope block_trampoline_pool(this); | 1194 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1090 positions_recorder()->WriteRecordedPositions(); | 1195 positions_recorder()->WriteRecordedPositions(); |
| 1091 GenInstrImmediate(REGIMM, rs, BGEZAL, offset); | 1196 GenInstrImmediate(REGIMM, rs, BGEZAL, offset); |
| 1092 BlockTrampolinePoolFor(1); // For associated delay slot. | 1197 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1093 } | 1198 } |
| 1094 | 1199 |
| 1095 | 1200 |
| 1096 void Assembler::bgtz(Register rs, int16_t offset) { | 1201 void Assembler::bgtz(Register rs, int16_t offset) { |
| 1097 BlockTrampolinePoolScope block_trampoline_pool(this); | 1202 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1098 GenInstrImmediate(BGTZ, rs, zero_reg, offset); | 1203 GenInstrImmediate(BGTZ, rs, zero_reg, offset); |
| 1099 BlockTrampolinePoolFor(1); // For associated delay slot. | 1204 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1100 } | 1205 } |
| 1101 | 1206 |
| 1102 | 1207 |
| 1208 void Assembler::bgtzc(Register rt, int16_t offset) { |
| 1209 ASSERT(kArchVariant == kMips64r6); |
| 1210 ASSERT(!(rt.is(zero_reg))); |
| 1211 GenInstrImmediate(BGTZL, zero_reg, rt, offset); |
| 1212 } |
| 1213 |
| 1214 |
| 1103 void Assembler::blez(Register rs, int16_t offset) { | 1215 void Assembler::blez(Register rs, int16_t offset) { |
| 1104 BlockTrampolinePoolScope block_trampoline_pool(this); | 1216 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1105 GenInstrImmediate(BLEZ, rs, zero_reg, offset); | 1217 GenInstrImmediate(BLEZ, rs, zero_reg, offset); |
| 1106 BlockTrampolinePoolFor(1); // For associated delay slot. | 1218 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1107 } | 1219 } |
| 1108 | 1220 |
| 1109 | 1221 |
| 1222 void Assembler::blezc(Register rt, int16_t offset) { |
| 1223 ASSERT(kArchVariant == kMips64r6); |
| 1224 ASSERT(!(rt.is(zero_reg))); |
| 1225 GenInstrImmediate(BLEZL, zero_reg, rt, offset); |
| 1226 } |
| 1227 |
| 1228 |
| 1229 void Assembler::bltzc(Register rt, int16_t offset) { |
| 1230 ASSERT(kArchVariant == kMips64r6); |
| 1231 ASSERT(!(rt.is(zero_reg))); |
| 1232 GenInstrImmediate(BGTZL, rt, rt, offset); |
| 1233 } |
| 1234 |
| 1235 |
| 1236 void Assembler::bltuc(Register rs, Register rt, int16_t offset) { |
| 1237 ASSERT(kArchVariant == kMips64r6); |
| 1238 ASSERT(!(rs.is(zero_reg))); |
| 1239 ASSERT(!(rt.is(zero_reg))); |
| 1240 ASSERT(rs.code() != rt.code()); |
| 1241 GenInstrImmediate(BGTZ, rs, rt, offset); |
| 1242 } |
| 1243 |
| 1244 |
| 1245 void Assembler::bltc(Register rs, Register rt, int16_t offset) { |
| 1246 ASSERT(kArchVariant == kMips64r6); |
| 1247 ASSERT(!(rs.is(zero_reg))); |
| 1248 ASSERT(!(rt.is(zero_reg))); |
| 1249 ASSERT(rs.code() != rt.code()); |
| 1250 GenInstrImmediate(BGTZL, rs, rt, offset); |
| 1251 } |
| 1252 |
| 1253 |
| 1110 void Assembler::bltz(Register rs, int16_t offset) { | 1254 void Assembler::bltz(Register rs, int16_t offset) { |
| 1111 BlockTrampolinePoolScope block_trampoline_pool(this); | 1255 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1112 GenInstrImmediate(REGIMM, rs, BLTZ, offset); | 1256 GenInstrImmediate(REGIMM, rs, BLTZ, offset); |
| 1113 BlockTrampolinePoolFor(1); // For associated delay slot. | 1257 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1114 } | 1258 } |
| 1115 | 1259 |
| 1116 | 1260 |
| 1117 void Assembler::bltzal(Register rs, int16_t offset) { | 1261 void Assembler::bltzal(Register rs, int16_t offset) { |
| 1262 ASSERT(kArchVariant != kMips64r6 || rs.is(zero_reg)); |
| 1118 BlockTrampolinePoolScope block_trampoline_pool(this); | 1263 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1119 positions_recorder()->WriteRecordedPositions(); | 1264 positions_recorder()->WriteRecordedPositions(); |
| 1120 GenInstrImmediate(REGIMM, rs, BLTZAL, offset); | 1265 GenInstrImmediate(REGIMM, rs, BLTZAL, offset); |
| 1121 BlockTrampolinePoolFor(1); // For associated delay slot. | 1266 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1122 } | 1267 } |
| 1123 | 1268 |
| 1124 | 1269 |
| 1125 void Assembler::bne(Register rs, Register rt, int16_t offset) { | 1270 void Assembler::bne(Register rs, Register rt, int16_t offset) { |
| 1126 BlockTrampolinePoolScope block_trampoline_pool(this); | 1271 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1127 GenInstrImmediate(BNE, rs, rt, offset); | 1272 GenInstrImmediate(BNE, rs, rt, offset); |
| 1128 BlockTrampolinePoolFor(1); // For associated delay slot. | 1273 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1129 } | 1274 } |
| 1130 | 1275 |
| 1131 | 1276 |
| 1277 void Assembler::bovc(Register rs, Register rt, int16_t offset) { |
| 1278 ASSERT(kArchVariant == kMips64r6); |
| 1279 ASSERT(!(rs.is(zero_reg))); |
| 1280 ASSERT(rs.code() >= rt.code()); |
| 1281 GenInstrImmediate(ADDI, rs, rt, offset); |
| 1282 } |
| 1283 |
| 1284 |
| 1285 void Assembler::bnvc(Register rs, Register rt, int16_t offset) { |
| 1286 ASSERT(kArchVariant == kMips64r6); |
| 1287 ASSERT(!(rs.is(zero_reg))); |
| 1288 ASSERT(rs.code() >= rt.code()); |
| 1289 GenInstrImmediate(DADDI, rs, rt, offset); |
| 1290 } |
| 1291 |
| 1292 |
| 1293 void Assembler::blezalc(Register rt, int16_t offset) { |
| 1294 ASSERT(kArchVariant == kMips64r6); |
| 1295 ASSERT(!(rt.is(zero_reg))); |
| 1296 GenInstrImmediate(BLEZ, zero_reg, rt, offset); |
| 1297 } |
| 1298 |
| 1299 |
| 1300 void Assembler::bgezalc(Register rt, int16_t offset) { |
| 1301 ASSERT(kArchVariant == kMips64r6); |
| 1302 ASSERT(!(rt.is(zero_reg))); |
| 1303 GenInstrImmediate(BLEZ, rt, rt, offset); |
| 1304 } |
| 1305 |
| 1306 |
| 1307 void Assembler::bgezall(Register rs, int16_t offset) { |
| 1308 ASSERT(kArchVariant == kMips64r6); |
| 1309 ASSERT(!(rs.is(zero_reg))); |
| 1310 GenInstrImmediate(REGIMM, rs, BGEZALL, offset); |
| 1311 } |
| 1312 |
| 1313 |
| 1314 void Assembler::bltzalc(Register rt, int16_t offset) { |
| 1315 ASSERT(kArchVariant == kMips64r6); |
| 1316 ASSERT(!(rt.is(zero_reg))); |
| 1317 GenInstrImmediate(BGTZ, rt, rt, offset); |
| 1318 } |
| 1319 |
| 1320 |
| 1321 void Assembler::bgtzalc(Register rt, int16_t offset) { |
| 1322 ASSERT(kArchVariant == kMips64r6); |
| 1323 ASSERT(!(rt.is(zero_reg))); |
| 1324 GenInstrImmediate(BGTZ, zero_reg, rt, offset); |
| 1325 } |
| 1326 |
| 1327 |
| 1328 void Assembler::beqzalc(Register rt, int16_t offset) { |
| 1329 ASSERT(kArchVariant == kMips64r6); |
| 1330 ASSERT(!(rt.is(zero_reg))); |
| 1331 GenInstrImmediate(ADDI, zero_reg, rt, offset); |
| 1332 } |
| 1333 |
| 1334 |
| 1335 void Assembler::bnezalc(Register rt, int16_t offset) { |
| 1336 ASSERT(kArchVariant == kMips64r6); |
| 1337 ASSERT(!(rt.is(zero_reg))); |
| 1338 GenInstrImmediate(DADDI, zero_reg, rt, offset); |
| 1339 } |
| 1340 |
| 1341 |
| 1342 void Assembler::beqc(Register rs, Register rt, int16_t offset) { |
| 1343 ASSERT(kArchVariant == kMips64r6); |
| 1344 ASSERT(rs.code() < rt.code()); |
| 1345 GenInstrImmediate(ADDI, rs, rt, offset); |
| 1346 } |
| 1347 |
| 1348 |
| 1349 void Assembler::beqzc(Register rs, int32_t offset) { |
| 1350 ASSERT(kArchVariant == kMips64r6); |
| 1351 ASSERT(!(rs.is(zero_reg))); |
| 1352 Instr instr = BEQZC | (rs.code() << kRsShift) | offset; |
| 1353 emit(instr); |
| 1354 } |
| 1355 |
| 1356 |
| 1357 void Assembler::bnec(Register rs, Register rt, int16_t offset) { |
| 1358 ASSERT(kArchVariant == kMips64r6); |
| 1359 ASSERT(rs.code() < rt.code()); |
| 1360 GenInstrImmediate(DADDI, rs, rt, offset); |
| 1361 } |
| 1362 |
| 1363 |
| 1364 void Assembler::bnezc(Register rs, int32_t offset) { |
| 1365 ASSERT(kArchVariant == kMips64r6); |
| 1366 ASSERT(!(rs.is(zero_reg))); |
| 1367 Instr instr = BNEZC | (rs.code() << kRsShift) | offset; |
| 1368 emit(instr); |
| 1369 } |
| 1370 |
| 1371 |
| 1132 void Assembler::j(int64_t target) { | 1372 void Assembler::j(int64_t target) { |
| 1133 #if DEBUG | 1373 #if DEBUG |
| 1134 // Get pc of delay slot. | 1374 // Get pc of delay slot. |
| 1135 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); | 1375 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); |
| 1136 bool in_range = (ipc ^ static_cast<uint64_t>(target) >> | 1376 bool in_range = (ipc ^ static_cast<uint64_t>(target) >> |
| 1137 (kImm26Bits + kImmFieldShift)) == 0; | 1377 (kImm26Bits + kImmFieldShift)) == 0; |
| 1138 ASSERT(in_range && ((target & 3) == 0)); | 1378 ASSERT(in_range && ((target & 3) == 0)); |
| 1139 #endif | 1379 #endif |
| 1140 GenInstrJump(J, target >> 2); | 1380 GenInstrJump(J, target >> 2); |
| 1141 } | 1381 } |
| 1142 | 1382 |
| 1143 | 1383 |
| 1144 void Assembler::jr(Register rs) { | 1384 void Assembler::jr(Register rs) { |
| 1145 BlockTrampolinePoolScope block_trampoline_pool(this); | 1385 if (kArchVariant != kMips64r6) { |
| 1146 if (rs.is(ra)) { | 1386 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1147 positions_recorder()->WriteRecordedPositions(); | 1387 if (rs.is(ra)) { |
| 1388 positions_recorder()->WriteRecordedPositions(); |
| 1389 } |
| 1390 GenInstrRegister(SPECIAL, rs, zero_reg, zero_reg, 0, JR); |
| 1391 BlockTrampolinePoolFor(1); // For associated delay slot. |
| 1392 } else { |
| 1393 jalr(rs, zero_reg); |
| 1148 } | 1394 } |
| 1149 GenInstrRegister(SPECIAL, rs, zero_reg, zero_reg, 0, JR); | |
| 1150 BlockTrampolinePoolFor(1); // For associated delay slot. | |
| 1151 } | 1395 } |
| 1152 | 1396 |
| 1153 | 1397 |
| 1154 void Assembler::jal(int64_t target) { | 1398 void Assembler::jal(int64_t target) { |
| 1155 #ifdef DEBUG | 1399 #ifdef DEBUG |
| 1156 // Get pc of delay slot. | 1400 // Get pc of delay slot. |
| 1157 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); | 1401 uint64_t ipc = reinterpret_cast<uint64_t>(pc_ + 1 * kInstrSize); |
| 1158 bool in_range = (ipc ^ static_cast<uint64_t>(target) >> | 1402 bool in_range = (ipc ^ static_cast<uint64_t>(target) >> |
| 1159 (kImm26Bits + kImmFieldShift)) == 0; | 1403 (kImm26Bits + kImmFieldShift)) == 0; |
| 1160 ASSERT(in_range && ((target & 3) == 0)); | 1404 ASSERT(in_range && ((target & 3) == 0)); |
| (...skipping 50 matching lines...) |
| 1211 GenInstrImmediate(ADDIU, rs, rd, j); | 1455 GenInstrImmediate(ADDIU, rs, rd, j); |
| 1212 } | 1456 } |
| 1213 | 1457 |
| 1214 | 1458 |
| 1215 void Assembler::subu(Register rd, Register rs, Register rt) { | 1459 void Assembler::subu(Register rd, Register rs, Register rt) { |
| 1216 GenInstrRegister(SPECIAL, rs, rt, rd, 0, SUBU); | 1460 GenInstrRegister(SPECIAL, rs, rt, rd, 0, SUBU); |
| 1217 } | 1461 } |
| 1218 | 1462 |
| 1219 | 1463 |
| 1220 void Assembler::mul(Register rd, Register rs, Register rt) { | 1464 void Assembler::mul(Register rd, Register rs, Register rt) { |
| 1221 GenInstrRegister(SPECIAL2, rs, rt, rd, 0, MUL); | 1465 if (kArchVariant == kMips64r6) { |
| 1466 GenInstrRegister(SPECIAL, rs, rt, rd, MUL_OP, MUL_MUH); |
| 1467 } else { |
| 1468 GenInstrRegister(SPECIAL2, rs, rt, rd, 0, MUL); |
| 1469 } |
| 1470 } |
| 1471 |
| 1472 |
| 1473 void Assembler::muh(Register rd, Register rs, Register rt) { |
| 1474 ASSERT(kArchVariant == kMips64r6); |
| 1475 GenInstrRegister(SPECIAL, rs, rt, rd, MUH_OP, MUL_MUH); |
| 1476 } |
| 1477 |
| 1478 |
| 1479 void Assembler::mulu(Register rd, Register rs, Register rt) { |
| 1480 ASSERT(kArchVariant == kMips64r6); |
| 1481 GenInstrRegister(SPECIAL, rs, rt, rd, MUL_OP, MUL_MUH_U); |
| 1482 } |
| 1483 |
| 1484 |
| 1485 void Assembler::muhu(Register rd, Register rs, Register rt) { |
| 1486 ASSERT(kArchVariant == kMips64r6); |
| 1487 GenInstrRegister(SPECIAL, rs, rt, rd, MUH_OP, MUL_MUH_U); |
| 1488 } |
| 1489 |
| 1490 |
| 1491 void Assembler::dmul(Register rd, Register rs, Register rt) { |
| 1492 ASSERT(kArchVariant == kMips64r6); |
| 1493 GenInstrRegister(SPECIAL, rs, rt, rd, MUL_OP, D_MUL_MUH); |
| 1494 } |
| 1495 |
| 1496 |
| 1497 void Assembler::dmuh(Register rd, Register rs, Register rt) { |
| 1498 ASSERT(kArchVariant == kMips64r6); |
| 1499 GenInstrRegister(SPECIAL, rs, rt, rd, MUH_OP, D_MUL_MUH); |
| 1500 } |
| 1501 |
| 1502 |
| 1503 void Assembler::dmulu(Register rd, Register rs, Register rt) { |
| 1504 ASSERT(kArchVariant == kMips64r6); |
| 1505 GenInstrRegister(SPECIAL, rs, rt, rd, MUL_OP, D_MUL_MUH_U); |
| 1506 } |
| 1507 |
| 1508 |
| 1509 void Assembler::dmuhu(Register rd, Register rs, Register rt) { |
| 1510 ASSERT(kArchVariant == kMips64r6); |
| 1511 GenInstrRegister(SPECIAL, rs, rt, rd, MUH_OP, D_MUL_MUH_U); |
| 1222 } | 1512 } |
| 1223 | 1513 |
| 1224 | 1514 |
| 1225 void Assembler::mult(Register rs, Register rt) { | 1515 void Assembler::mult(Register rs, Register rt) { |
| 1516 ASSERT(kArchVariant != kMips64r6); |
| 1226 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, MULT); | 1517 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, MULT); |
| 1227 } | 1518 } |
| 1228 | 1519 |
| 1229 | 1520 |
| 1230 void Assembler::multu(Register rs, Register rt) { | 1521 void Assembler::multu(Register rs, Register rt) { |
| 1522 ASSERT(kArchVariant != kMips64r6); |
| 1231 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, MULTU); | 1523 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, MULTU); |
| 1232 } | 1524 } |
| 1233 | 1525 |
| 1234 | 1526 |
| 1235 void Assembler::daddiu(Register rd, Register rs, int32_t j) { | 1527 void Assembler::daddiu(Register rd, Register rs, int32_t j) { |
| 1236 GenInstrImmediate(DADDIU, rs, rd, j); | 1528 GenInstrImmediate(DADDIU, rs, rd, j); |
| 1237 } | 1529 } |
| 1238 | 1530 |
| 1239 | 1531 |
| 1240 void Assembler::div(Register rs, Register rt) { | 1532 void Assembler::div(Register rs, Register rt) { |
| 1241 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DIV); | 1533 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DIV); |
| 1242 } | 1534 } |
| 1243 | 1535 |
| 1244 | 1536 |
| 1537 void Assembler::div(Register rd, Register rs, Register rt) { |
| 1538 ASSERT(kArchVariant == kMips64r6); |
| 1539 GenInstrRegister(SPECIAL, rs, rt, rd, DIV_OP, DIV_MOD); |
| 1540 } |
| 1541 |
| 1542 |
| 1543 void Assembler::mod(Register rd, Register rs, Register rt) { |
| 1544 ASSERT(kArchVariant == kMips64r6); |
| 1545 GenInstrRegister(SPECIAL, rs, rt, rd, MOD_OP, DIV_MOD); |
| 1546 } |
| 1547 |
| 1548 |
| 1245 void Assembler::divu(Register rs, Register rt) { | 1549 void Assembler::divu(Register rs, Register rt) { |
| 1246 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DIVU); | 1550 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DIVU); |
| 1247 } | 1551 } |
| 1248 | 1552 |
| 1249 | 1553 |
| 1554 void Assembler::divu(Register rd, Register rs, Register rt) { |
| 1555 ASSERT(kArchVariant == kMips64r6); |
| 1556 GenInstrRegister(SPECIAL, rs, rt, rd, DIV_OP, DIV_MOD_U); |
| 1557 } |
| 1558 |
| 1559 |
| 1560 void Assembler::modu(Register rd, Register rs, Register rt) { |
| 1561 ASSERT(kArchVariant == kMips64r6); |
| 1562 GenInstrRegister(SPECIAL, rs, rt, rd, MOD_OP, DIV_MOD_U); |
| 1563 } |
| 1564 |
| 1565 |
| 1250 void Assembler::daddu(Register rd, Register rs, Register rt) { | 1566 void Assembler::daddu(Register rd, Register rs, Register rt) { |
| 1251 GenInstrRegister(SPECIAL, rs, rt, rd, 0, DADDU); | 1567 GenInstrRegister(SPECIAL, rs, rt, rd, 0, DADDU); |
| 1252 } | 1568 } |
| 1253 | 1569 |
| 1254 | 1570 |
| 1255 void Assembler::dsubu(Register rd, Register rs, Register rt) { | 1571 void Assembler::dsubu(Register rd, Register rs, Register rt) { |
| 1256 GenInstrRegister(SPECIAL, rs, rt, rd, 0, DSUBU); | 1572 GenInstrRegister(SPECIAL, rs, rt, rd, 0, DSUBU); |
| 1257 } | 1573 } |
| 1258 | 1574 |
| 1259 | 1575 |
| 1260 void Assembler::dmult(Register rs, Register rt) { | 1576 void Assembler::dmult(Register rs, Register rt) { |
| 1261 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DMULT); | 1577 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DMULT); |
| 1262 } | 1578 } |
| 1263 | 1579 |
| 1264 | 1580 |
| 1265 void Assembler::dmultu(Register rs, Register rt) { | 1581 void Assembler::dmultu(Register rs, Register rt) { |
| 1266 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DMULTU); | 1582 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DMULTU); |
| 1267 } | 1583 } |
| 1268 | 1584 |
| 1269 | 1585 |
| 1270 void Assembler::ddiv(Register rs, Register rt) { | 1586 void Assembler::ddiv(Register rs, Register rt) { |
| 1271 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DDIV); | 1587 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DDIV); |
| 1272 } | 1588 } |
| 1273 | 1589 |
| 1274 | 1590 |
| 1591 void Assembler::ddiv(Register rd, Register rs, Register rt) { |
| 1592 ASSERT(kArchVariant == kMips64r6); |
| 1593 GenInstrRegister(SPECIAL, rs, rt, rd, DIV_OP, D_DIV_MOD); |
| 1594 } |
| 1595 |
| 1596 |
| 1597 void Assembler::dmod(Register rd, Register rs, Register rt) { |
| 1598 ASSERT(kArchVariant == kMips64r6); |
| 1599 GenInstrRegister(SPECIAL, rs, rt, rd, MOD_OP, D_DIV_MOD); |
| 1600 } |
| 1601 |
| 1602 |
| 1275 void Assembler::ddivu(Register rs, Register rt) { | 1603 void Assembler::ddivu(Register rs, Register rt) { |
| 1276 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DDIVU); | 1604 GenInstrRegister(SPECIAL, rs, rt, zero_reg, 0, DDIVU); |
| 1277 } | 1605 } |
| 1278 | 1606 |
| 1279 | 1607 |
| 1608 void Assembler::ddivu(Register rd, Register rs, Register rt) { |
| 1609 ASSERT(kArchVariant == kMips64r6); |
| 1610 GenInstrRegister(SPECIAL, rs, rt, rd, DIV_OP, D_DIV_MOD_U); |
| 1611 } |
| 1612 |
| 1613 |
| 1614 void Assembler::dmodu(Register rd, Register rs, Register rt) { |
| 1615 ASSERT(kArchVariant == kMips64r6); |
| 1616 GenInstrRegister(SPECIAL, rs, rt, rd, MOD_OP, D_DIV_MOD_U); |
| 1617 } |
| 1618 |
| 1619 |
| 1280 // Logical. | 1620 // Logical. |
| 1281 | 1621 |
| 1282 void Assembler::and_(Register rd, Register rs, Register rt) { | 1622 void Assembler::and_(Register rd, Register rs, Register rt) { |
| 1283 GenInstrRegister(SPECIAL, rs, rt, rd, 0, AND); | 1623 GenInstrRegister(SPECIAL, rs, rt, rd, 0, AND); |
| 1284 } | 1624 } |
| 1285 | 1625 |
| 1286 | 1626 |
| 1287 void Assembler::andi(Register rt, Register rs, int32_t j) { | 1627 void Assembler::andi(Register rt, Register rs, int32_t j) { |
| 1288 ASSERT(is_uint16(j)); | 1628 ASSERT(is_uint16(j)); |
| 1289 GenInstrImmediate(ANDI, rs, rt, j); | 1629 GenInstrImmediate(ANDI, rs, rt, j); |
| (...skipping 269 matching lines...) |
| 1559 GenInstrImmediate(SWR, rs.rm(), rd, rs.offset_); | 1899 GenInstrImmediate(SWR, rs.rm(), rd, rs.offset_); |
| 1560 } | 1900 } |
| 1561 | 1901 |
| 1562 | 1902 |
| 1563 void Assembler::lui(Register rd, int32_t j) { | 1903 void Assembler::lui(Register rd, int32_t j) { |
| 1564 ASSERT(is_uint16(j)); | 1904 ASSERT(is_uint16(j)); |
| 1565 GenInstrImmediate(LUI, zero_reg, rd, j); | 1905 GenInstrImmediate(LUI, zero_reg, rd, j); |
| 1566 } | 1906 } |
| 1567 | 1907 |
| 1568 | 1908 |
| 1909 void Assembler::aui(Register rs, Register rt, int32_t j) { |
| 1910 // This instruction uses the same opcode as 'lui'. The difference in encoding |
| 1911 // is that 'lui' has the zero register in its rs field. |
| 1912 ASSERT(is_uint16(j)); |
| 1913 GenInstrImmediate(LUI, rs, rt, j); |
| 1914 } |
| 1915 |
| 1916 |
| 1917 void Assembler::daui(Register rs, Register rt, int32_t j) { |
| 1918 ASSERT(is_uint16(j)); |
| 1919 GenInstrImmediate(DAUI, rs, rt, j); |
| 1920 } |
| 1921 |
| 1922 |
| 1923 void Assembler::dahi(Register rs, int32_t j) { |
| 1924 ASSERT(is_uint16(j)); |
| 1925 GenInstrImmediate(REGIMM, rs, DAHI, j); |
| 1926 } |
| 1927 |
| 1928 |
| 1929 void Assembler::dati(Register rs, int32_t j) { |
| 1930 ASSERT(is_uint16(j)); |
| 1931 GenInstrImmediate(REGIMM, rs, DATI, j); |
| 1932 } |
| 1933 |
| 1934 |
| 1569 void Assembler::ldl(Register rd, const MemOperand& rs) { | 1935 void Assembler::ldl(Register rd, const MemOperand& rs) { |
| 1570 GenInstrImmediate(LDL, rs.rm(), rd, rs.offset_); | 1936 GenInstrImmediate(LDL, rs.rm(), rd, rs.offset_); |
| 1571 } | 1937 } |
| 1572 | 1938 |
| 1573 | 1939 |
| 1574 void Assembler::ldr(Register rd, const MemOperand& rs) { | 1940 void Assembler::ldr(Register rd, const MemOperand& rs) { |
| 1575 GenInstrImmediate(LDR, rs.rm(), rd, rs.offset_); | 1941 GenInstrImmediate(LDR, rs.rm(), rd, rs.offset_); |
| 1576 } | 1942 } |
| 1577 | 1943 |
| 1578 | 1944 |
| (...skipping 161 matching lines...) |
| 1740 } | 2106 } |
| 1741 | 2107 |
| 1742 | 2108 |
| 1743 void Assembler::movf(Register rd, Register rs, uint16_t cc) { | 2109 void Assembler::movf(Register rd, Register rs, uint16_t cc) { |
| 1744 Register rt; | 2110 Register rt; |
| 1745 rt.code_ = (cc & 0x0007) << 2 | 0; | 2111 rt.code_ = (cc & 0x0007) << 2 | 0; |
| 1746 GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVCI); | 2112 GenInstrRegister(SPECIAL, rs, rt, rd, 0, MOVCI); |
| 1747 } | 2113 } |
| 1748 | 2114 |
| 1749 | 2115 |
| 2116 void Assembler::sel(SecondaryField fmt, FPURegister fd, |
| 2117 FPURegister ft, FPURegister fs, uint8_t sel) { |
| 2118 ASSERT(kArchVariant == kMips64r6); |
| 2119 ASSERT((fmt == D) || (fmt == S)); |
| 2121 |
| 2122 Instr instr = COP1 | fmt << kRsShift | ft.code() << kFtShift | |
| 2123 fs.code() << kFsShift | fd.code() << kFdShift | SEL; |
| 2124 emit(instr); |
| 2125 } |
| 2126 |
| 2127 |
| 2128 // GPR. |
| 2129 void Assembler::seleqz(Register rs, Register rt, Register rd) { |
| 2130 ASSERT(kArchVariant == kMips64r6); |
| 2131 GenInstrRegister(SPECIAL, rs, rt, rd, 0, SELEQZ_S); |
| 2132 } |
| 2133 |
| 2134 |
| 2135 // FPR. |
| 2136 void Assembler::seleqz(SecondaryField fmt, FPURegister fd, |
| 2137 FPURegister ft, FPURegister fs) { |
| 2138 ASSERT(kArchVariant == kMips64r6); |
| 2139 ASSERT((fmt == D) || (fmt == S)); |
| 2141 |
| 2142 Instr instr = COP1 | fmt << kRsShift | ft.code() << kFtShift | |
| 2143 fs.code() << kFsShift | fd.code() << kFdShift | SELEQZ_C; |
| 2144 emit(instr); |
| 2145 } |
| 2146 |
| 2147 |
| 2148 // GPR. |
| 2149 void Assembler::selnez(Register rs, Register rt, Register rd) { |
| 2150 ASSERT(kArchVariant == kMips64r6); |
| 2151 GenInstrRegister(SPECIAL, rs, rt, rd, 0, SELNEZ_S); |
| 2152 } |
| 2153 |
| 2154 |
| 2155 // FPR. |
| 2156 void Assembler::selnez(SecondaryField fmt, FPURegister fd, |
| 2157 FPURegister ft, FPURegister fs) { |
| 2158 ASSERT(kArchVariant == kMips64r6); |
| 2159 ASSERT((fmt == D) || (fmt == S)); |
| 2161 |
| 2162 Instr instr = COP1 | fmt << kRsShift | ft.code() << kFtShift | |
| 2163 fs.code() << kFsShift | fd.code() << kFdShift | SELNEZ_C; |
| 2164 emit(instr); |
| 2165 } |
| 2166 |
| 2167 |
| 1750 // Bit twiddling. | 2168 // Bit twiddling. |
| 1751 void Assembler::clz(Register rd, Register rs) { | 2169 void Assembler::clz(Register rd, Register rs) { |
| 1752 // Clz instr requires same GPR number in 'rd' and 'rt' fields. | 2170 if (kArchVariant != kMips64r6) { |
| 1753 GenInstrRegister(SPECIAL2, rs, rd, rd, 0, CLZ); | 2171 // Clz instr requires same GPR number in 'rd' and 'rt' fields. |
| 2172 GenInstrRegister(SPECIAL2, rs, rd, rd, 0, CLZ); |
| 2173 } else { |
| 2174 GenInstrRegister(SPECIAL, rs, zero_reg, rd, 1, CLZ_R6); |
| 2175 } |
| 1754 } | 2176 } |
| 1755 | 2177 |
| 1756 | 2178 |
| 1757 void Assembler::ins_(Register rt, Register rs, uint16_t pos, uint16_t size) { | 2179 void Assembler::ins_(Register rt, Register rs, uint16_t pos, uint16_t size) { |
| 1758 // Should be called via MacroAssembler::Ins. | 2180 // Should be called via MacroAssembler::Ins. |
| 1759 // Ins instr has 'rt' field as dest, and two uint5: msb, lsb. | 2181 // Ins instr has 'rt' field as dest, and two uint5: msb, lsb. |
| 1760 ASSERT(kArchVariant == kMips64r2); | 2182 ASSERT((kArchVariant == kMips64r2) || (kArchVariant == kMips64r6)); |
| 1761 GenInstrRegister(SPECIAL3, rs, rt, pos + size - 1, pos, INS); | 2183 GenInstrRegister(SPECIAL3, rs, rt, pos + size - 1, pos, INS); |
| 1762 } | 2184 } |
| 1763 | 2185 |
| 1764 | 2186 |
| 1765 void Assembler::ext_(Register rt, Register rs, uint16_t pos, uint16_t size) { | 2187 void Assembler::ext_(Register rt, Register rs, uint16_t pos, uint16_t size) { |
| 1766 // Should be called via MacroAssembler::Ext. | 2188 // Should be called via MacroAssembler::Ext. |
| 1767 // Ext instr has 'rt' field as dest, and two uint5: msb, lsb. | 2189 // Ext instr has 'rt' field as dest, and two uint5: msb, lsb. |
| 1768 ASSERT(kArchVariant == kMips64r2); | 2190 ASSERT(kArchVariant == kMips64r2 || kArchVariant == kMips64r6); |
| 1769 GenInstrRegister(SPECIAL3, rs, rt, size - 1, pos, EXT); | 2191 GenInstrRegister(SPECIAL3, rs, rt, size - 1, pos, EXT); |
| 1770 } | 2192 } |
| 1771 | 2193 |
| 1772 | 2194 |
| 1773 void Assembler::pref(int32_t hint, const MemOperand& rs) { | 2195 void Assembler::pref(int32_t hint, const MemOperand& rs) { |
| 1774 ASSERT(kArchVariant != kLoongson); | |
| 1775 ASSERT(is_uint5(hint) && is_uint16(rs.offset_)); | 2196 ASSERT(is_uint5(hint) && is_uint16(rs.offset_)); |
| 1776 Instr instr = PREF | (rs.rm().code() << kRsShift) | (hint << kRtShift) | 2197 Instr instr = PREF | (rs.rm().code() << kRsShift) | (hint << kRtShift) |
| 1777 | (rs.offset_); | 2198 | (rs.offset_); |
| 1778 emit(instr); | 2199 emit(instr); |
| 1779 } | 2200 } |
| 1780 | 2201 |
| 1781 | 2202 |
| 1782 // --------Coprocessor-instructions---------------- | 2203 // --------Coprocessor-instructions---------------- |
| 1783 | 2204 |
| 1784 // Load, store, move. | 2205 // Load, store, move. |
| (...skipping 78 matching lines...) |
| 1863 } | 2284 } |
| 1864 | 2285 |
| 1865 | 2286 |
| 1866 void Assembler::mul_d(FPURegister fd, FPURegister fs, FPURegister ft) { | 2287 void Assembler::mul_d(FPURegister fd, FPURegister fs, FPURegister ft) { |
| 1867 GenInstrRegister(COP1, D, ft, fs, fd, MUL_D); | 2288 GenInstrRegister(COP1, D, ft, fs, fd, MUL_D); |
| 1868 } | 2289 } |
| 1869 | 2290 |
| 1870 | 2291 |
| 1871 void Assembler::madd_d(FPURegister fd, FPURegister fr, FPURegister fs, | 2292 void Assembler::madd_d(FPURegister fd, FPURegister fr, FPURegister fs, |
| 1872 FPURegister ft) { | 2293 FPURegister ft) { |
| 1873 ASSERT(kArchVariant != kLoongson); | |
| 1874 GenInstrRegister(COP1X, fr, ft, fs, fd, MADD_D); | 2294 GenInstrRegister(COP1X, fr, ft, fs, fd, MADD_D); |
| 1875 } | 2295 } |
| 1876 | 2296 |
| 1877 | 2297 |
| 1878 void Assembler::div_d(FPURegister fd, FPURegister fs, FPURegister ft) { | 2298 void Assembler::div_d(FPURegister fd, FPURegister fs, FPURegister ft) { |
| 1879 GenInstrRegister(COP1, D, ft, fs, fd, DIV_D); | 2299 GenInstrRegister(COP1, D, ft, fs, fd, DIV_D); |
| 1880 } | 2300 } |
| 1881 | 2301 |
| 1882 | 2302 |
| 1883 void Assembler::abs_d(FPURegister fd, FPURegister fs) { | 2303 void Assembler::abs_d(FPURegister fd, FPURegister fs) { |
| (...skipping 115 matching lines...) |
| 1999 void Assembler::ceil_l_s(FPURegister fd, FPURegister fs) { | 2419 void Assembler::ceil_l_s(FPURegister fd, FPURegister fs) { |
| 2000 GenInstrRegister(COP1, S, f0, fs, fd, CEIL_L_S); | 2420 GenInstrRegister(COP1, S, f0, fs, fd, CEIL_L_S); |
| 2001 } | 2421 } |
| 2002 | 2422 |
| 2003 | 2423 |
| 2004 void Assembler::ceil_l_d(FPURegister fd, FPURegister fs) { | 2424 void Assembler::ceil_l_d(FPURegister fd, FPURegister fs) { |
| 2005 GenInstrRegister(COP1, D, f0, fs, fd, CEIL_L_D); | 2425 GenInstrRegister(COP1, D, f0, fs, fd, CEIL_L_D); |
| 2006 } | 2426 } |
| 2007 | 2427 |
| 2008 | 2428 |
| 2429 void Assembler::min(SecondaryField fmt, FPURegister fd, FPURegister ft, |
| 2430 FPURegister fs) { |
| 2431 ASSERT(kArchVariant == kMips64r6); |
| 2432 ASSERT((fmt == D) || (fmt == S)); |
| 2433 GenInstrRegister(COP1, fmt, ft, fs, fd, MIN); |
| 2434 } |
| 2435 |
| 2436 |
| 2437 void Assembler::mina(SecondaryField fmt, FPURegister fd, FPURegister ft, |
| 2438 FPURegister fs) { |
| 2439 ASSERT(kArchVariant == kMips64r6); |
| 2440 ASSERT((fmt == D) || (fmt == S)); |
| 2441 GenInstrRegister(COP1, fmt, ft, fs, fd, MINA); |
| 2442 } |
| 2443 |
| 2444 |
| 2445 void Assembler::max(SecondaryField fmt, FPURegister fd, FPURegister ft, |
| 2446 FPURegister fs) { |
| 2447 ASSERT(kArchVariant == kMips64r6); |
| 2448 ASSERT((fmt == D) || (fmt == S)); |
| 2449 GenInstrRegister(COP1, fmt, ft, fs, fd, MAX); |
| 2450 } |
| 2451 |
| 2452 |
| 2453 void Assembler::maxa(SecondaryField fmt, FPURegister fd, FPURegister ft, |
| 2454 FPURegister fs) { |
| 2455 ASSERT(kArchVariant == kMips64r6); |
| 2456 ASSERT((fmt == D) || (fmt == S)); |
| 2457 GenInstrRegister(COP1, fmt, ft, fs, fd, MAXA); |
| 2458 } |
| 2459 |
| 2460 |
| 2009 void Assembler::cvt_s_w(FPURegister fd, FPURegister fs) { | 2461 void Assembler::cvt_s_w(FPURegister fd, FPURegister fs) { |
| 2010 GenInstrRegister(COP1, W, f0, fs, fd, CVT_S_W); | 2462 GenInstrRegister(COP1, W, f0, fs, fd, CVT_S_W); |
| 2011 } | 2463 } |
| 2012 | 2464 |
| 2013 | 2465 |
| 2014 void Assembler::cvt_s_l(FPURegister fd, FPURegister fs) { | 2466 void Assembler::cvt_s_l(FPURegister fd, FPURegister fs) { |
| 2015 ASSERT(kArchVariant == kMips64r2); | 2467 ASSERT(kArchVariant == kMips64r2); |
| 2016 GenInstrRegister(COP1, L, f0, fs, fd, CVT_S_L); | 2468 GenInstrRegister(COP1, L, f0, fs, fd, CVT_S_L); |
| 2017 } | 2469 } |
| 2018 | 2470 |
| (...skipping 12 matching lines...) |
| 2031 ASSERT(kArchVariant == kMips64r2); | 2483 ASSERT(kArchVariant == kMips64r2); |
| 2032 GenInstrRegister(COP1, L, f0, fs, fd, CVT_D_L); | 2484 GenInstrRegister(COP1, L, f0, fs, fd, CVT_D_L); |
| 2033 } | 2485 } |
| 2034 | 2486 |
| 2035 | 2487 |
| 2036 void Assembler::cvt_d_s(FPURegister fd, FPURegister fs) { | 2488 void Assembler::cvt_d_s(FPURegister fd, FPURegister fs) { |
| 2037 GenInstrRegister(COP1, S, f0, fs, fd, CVT_D_S); | 2489 GenInstrRegister(COP1, S, f0, fs, fd, CVT_D_S); |
| 2038 } | 2490 } |
| 2039 | 2491 |
| 2040 | 2492 |
| 2041 // Conditions. | 2493 // Conditions for >= MIPSr6. |
| 2494 void Assembler::cmp(FPUCondition cond, SecondaryField fmt, |
| 2495 FPURegister fd, FPURegister fs, FPURegister ft) { |
| 2496 ASSERT(kArchVariant == kMips64r6); |
| 2497 ASSERT((fmt & ~(31 << kRsShift)) == 0); |
| 2498 Instr instr = COP1 | fmt | ft.code() << kFtShift | |
| 2499 fs.code() << kFsShift | fd.code() << kFdShift | (0 << 5) | cond; |
| 2500 emit(instr); |
| 2501 } |
| 2502 |
| 2503 |
| 2504 void Assembler::bc1eqz(int16_t offset, FPURegister ft) { |
| 2505 ASSERT(kArchVariant == kMips64r6); |
| 2506 Instr instr = COP1 | BC1EQZ | ft.code() << kFtShift | (offset & kImm16Mask); |
| 2507 emit(instr); |
| 2508 } |
| 2509 |
| 2510 |
| 2511 void Assembler::bc1nez(int16_t offset, FPURegister ft) { |
| 2512 ASSERT(kArchVariant == kMips64r6); |
| 2513 Instr instr = COP1 | BC1NEZ | ft.code() << kFtShift | (offset & kImm16Mask); |
| 2514 emit(instr); |
| 2515 } |
| 2516 |
| 2517 |
| 2518 // Conditions for < MIPSr6. |
| 2042 void Assembler::c(FPUCondition cond, SecondaryField fmt, | 2519 void Assembler::c(FPUCondition cond, SecondaryField fmt, |
| 2043 FPURegister fs, FPURegister ft, uint16_t cc) { | 2520 FPURegister fs, FPURegister ft, uint16_t cc) { |
| 2521 ASSERT(kArchVariant != kMips64r6); |
| 2044 ASSERT(is_uint3(cc)); | 2522 ASSERT(is_uint3(cc)); |
| 2045 ASSERT((fmt & ~(31 << kRsShift)) == 0); | 2523 ASSERT((fmt & ~(31 << kRsShift)) == 0); |
| 2046 Instr instr = COP1 | fmt | ft.code() << 16 | fs.code() << kFsShift | 2524 Instr instr = COP1 | fmt | ft.code() << kFtShift | fs.code() << kFsShift |
| 2047 | cc << 8 | 3 << 4 | cond; | 2525 | cc << 8 | 3 << 4 | cond; |
| 2048 emit(instr); | 2526 emit(instr); |
| 2049 } | 2527 } |
| 2050 | 2528 |
| 2051 | 2529 |
| 2052 void Assembler::fcmp(FPURegister src1, const double src2, | 2530 void Assembler::fcmp(FPURegister src1, const double src2, |
| 2053 FPUCondition cond) { | 2531 FPUCondition cond) { |
| 2054 ASSERT(src2 == 0.0); | 2532 ASSERT(src2 == 0.0); |
| 2055 mtc1(zero_reg, f14); | 2533 mtc1(zero_reg, f14); |
| 2056 cvt_d_w(f14, f14); | 2534 cvt_d_w(f14, f14); |
| (...skipping 391 matching lines...) |
| 2448 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { | 2926 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { |
| 2449 // No out-of-line constant pool support. | 2927 // No out-of-line constant pool support. |
| 2450 ASSERT(!FLAG_enable_ool_constant_pool); | 2928 ASSERT(!FLAG_enable_ool_constant_pool); |
| 2451 return; | 2929 return; |
| 2452 } | 2930 } |
| 2453 | 2931 |
| 2454 | 2932 |
| 2455 } } // namespace v8::internal | 2933 } } // namespace v8::internal |
| 2456 | 2934 |
| 2457 #endif // V8_TARGET_ARCH_MIPS64 | 2935 #endif // V8_TARGET_ARCH_MIPS64 |