OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 505 matching lines...) |
516 F_CVT f = FUNCTION_CAST<F_CVT>(code->entry()); | 516 F_CVT f = FUNCTION_CAST<F_CVT>(code->entry()); |
517 | 517 |
518 return reinterpret_cast<RET_TYPE>( | 518 return reinterpret_cast<RET_TYPE>( |
519 CALL_GENERATED_CODE(isolate, f, x, 0, 0, 0, 0)); | 519 CALL_GENERATED_CODE(isolate, f, x, 0, 0, 0, 0)); |
520 } | 520 } |
521 | 521 |
522 TEST(cvt_s_w_Trunc_uw_s) { | 522 TEST(cvt_s_w_Trunc_uw_s) { |
523 CcTest::InitializeVM(); | 523 CcTest::InitializeVM(); |
524 FOR_UINT32_INPUTS(i, cvt_trunc_uint32_test_values) { | 524 FOR_UINT32_INPUTS(i, cvt_trunc_uint32_test_values) { |
525 uint32_t input = *i; | 525 uint32_t input = *i; |
526 CHECK_EQ(static_cast<float>(input), | 526 auto fn = [](MacroAssembler* masm) { |
527 run_Cvt<uint32_t>(input, [](MacroAssembler* masm) { | 527 __ cvt_s_w(f0, f4); |
528 __ cvt_s_w(f0, f4); | 528 __ Trunc_uw_s(f2, f0, f1); |
529 __ Trunc_uw_s(f2, f0, f1); | 529 }; |
530 })); | 530 CHECK_EQ(static_cast<float>(input), run_Cvt<uint32_t>(input, fn)); |
531 } | 531 } |
532 } | 532 } |
533 | 533 |
534 TEST(cvt_d_w_Trunc_w_d) { | 534 TEST(cvt_d_w_Trunc_w_d) { |
535 CcTest::InitializeVM(); | 535 CcTest::InitializeVM(); |
536 FOR_INT32_INPUTS(i, cvt_trunc_int32_test_values) { | 536 FOR_INT32_INPUTS(i, cvt_trunc_int32_test_values) { |
537 int32_t input = *i; | 537 int32_t input = *i; |
538 CHECK_EQ(static_cast<double>(input), | 538 auto fn = [](MacroAssembler* masm) { |
539 run_Cvt<int32_t>(input, [](MacroAssembler* masm) { | 539 __ cvt_d_w(f0, f4); |
540 __ cvt_d_w(f0, f4); | 540 __ Trunc_w_d(f2, f0); |
541 __ Trunc_w_d(f2, f0); | 541 }; |
542 })); | 542 CHECK_EQ(static_cast<double>(input), run_Cvt<int32_t>(input, fn)); |
543 } | 543 } |
544 } | 544 } |
545 | 545 |
546 static const std::vector<int32_t> overflow_int32_test_values() { | 546 static const std::vector<int32_t> overflow_int32_test_values() { |
547 static const int32_t kValues[] = { | 547 static const int32_t kValues[] = { |
548 static_cast<int32_t>(0xf0000000), static_cast<int32_t>(0x00000001), | 548 static_cast<int32_t>(0xf0000000), static_cast<int32_t>(0x00000001), |
549 static_cast<int32_t>(0xff000000), static_cast<int32_t>(0x0000f000), | 549 static_cast<int32_t>(0xff000000), static_cast<int32_t>(0x0000f000), |
550 static_cast<int32_t>(0x0f000000), static_cast<int32_t>(0x991234ab), | 550 static_cast<int32_t>(0x0f000000), static_cast<int32_t>(0x991234ab), |
551 static_cast<int32_t>(0xb0ffff01), static_cast<int32_t>(0x00006fff), | 551 static_cast<int32_t>(0xb0ffff01), static_cast<int32_t>(0x00006fff), |
552 static_cast<int32_t>(0xffffffff)}; | 552 static_cast<int32_t>(0xffffffff)}; |
(...skipping 565 matching lines...) |
1118 char memory_buffer[kBufferSize]; | 1118 char memory_buffer[kBufferSize]; |
1119 char* buffer_middle = memory_buffer + (kBufferSize / 2); | 1119 char* buffer_middle = memory_buffer + (kBufferSize / 2); |
1120 | 1120 |
1121 FOR_UINT64_INPUTS(i, unsigned_test_values) { | 1121 FOR_UINT64_INPUTS(i, unsigned_test_values) { |
1122 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { | 1122 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { |
1123 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { | 1123 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { |
1124 uint16_t value = static_cast<uint64_t>(*i & 0xFFFF); | 1124 uint16_t value = static_cast<uint64_t>(*i & 0xFFFF); |
1125 int32_t in_offset = *j1 + *k1; | 1125 int32_t in_offset = *j1 + *k1; |
1126 int32_t out_offset = *j2 + *k2; | 1126 int32_t out_offset = *j2 + *k2; |
1127 | 1127 |
1128 CHECK_EQ(true, run_Unaligned<uint16_t>( | 1128 auto fn_1 = [](MacroAssembler* masm, int32_t in_offset, |
1129 buffer_middle, in_offset, out_offset, value, | 1129 int32_t out_offset) { |
1130 [](MacroAssembler* masm, int32_t in_offset, | 1130 __ Ulh(v0, MemOperand(a0, in_offset)); |
1131 int32_t out_offset) { | 1131 __ Ush(v0, MemOperand(a0, out_offset), v0); |
1132 __ Ulh(v0, MemOperand(a0, in_offset)); | 1132 }; |
1133 __ Ush(v0, MemOperand(a0, out_offset), v0); | 1133 CHECK_EQ(true, run_Unaligned<uint16_t>(buffer_middle, in_offset, |
1134 })); | 1134 out_offset, value, fn_1)); |
1135 CHECK_EQ(true, run_Unaligned<uint16_t>( | 1135 |
1136 buffer_middle, in_offset, out_offset, value, | 1136 auto fn_2 = [](MacroAssembler* masm, int32_t in_offset, |
1137 [](MacroAssembler* masm, int32_t in_offset, | 1137 int32_t out_offset) { |
1138 int32_t out_offset) { | 1138 __ mov(t0, a0); |
1139 __ mov(t0, a0); | 1139 __ Ulh(a0, MemOperand(a0, in_offset)); |
1140 __ Ulh(a0, MemOperand(a0, in_offset)); | 1140 __ Ush(a0, MemOperand(t0, out_offset), v0); |
1141 __ Ush(a0, MemOperand(t0, out_offset), v0); | 1141 }; |
1142 })); | 1142 CHECK_EQ(true, run_Unaligned<uint16_t>(buffer_middle, in_offset, |
1143 CHECK_EQ(true, run_Unaligned<uint16_t>( | 1143 out_offset, value, fn_2)); |
1144 buffer_middle, in_offset, out_offset, value, | 1144 |
1145 [](MacroAssembler* masm, int32_t in_offset, | 1145 auto fn_3 = [](MacroAssembler* masm, int32_t in_offset, |
1146 int32_t out_offset) { | 1146 int32_t out_offset) { |
1147 __ mov(t0, a0); | 1147 __ mov(t0, a0); |
1148 __ Ulhu(a0, MemOperand(a0, in_offset)); | 1148 __ Ulhu(a0, MemOperand(a0, in_offset)); |
1149 __ Ush(a0, MemOperand(t0, out_offset), t1); | 1149 __ Ush(a0, MemOperand(t0, out_offset), t1); |
1150 })); | 1150 }; |
1151 CHECK_EQ(true, run_Unaligned<uint16_t>( | 1151 CHECK_EQ(true, run_Unaligned<uint16_t>(buffer_middle, in_offset, |
1152 buffer_middle, in_offset, out_offset, value, | 1152 out_offset, value, fn_3)); |
1153 [](MacroAssembler* masm, int32_t in_offset, | 1153 |
1154 int32_t out_offset) { | 1154 auto fn_4 = [](MacroAssembler* masm, int32_t in_offset, |
1155 __ Ulhu(v0, MemOperand(a0, in_offset)); | 1155 int32_t out_offset) { |
1156 __ Ush(v0, MemOperand(a0, out_offset), t1); | 1156 __ Ulhu(v0, MemOperand(a0, in_offset)); |
1157 })); | 1157 __ Ush(v0, MemOperand(a0, out_offset), t1); |
| 1158 }; |
| 1159 CHECK_EQ(true, run_Unaligned<uint16_t>(buffer_middle, in_offset, |
| 1160 out_offset, value, fn_4)); |
1158 } | 1161 } |
1159 } | 1162 } |
1160 } | 1163 } |
1161 } | 1164 } |
1162 | 1165 |
1163 TEST(Ulh_bitextension) { | 1166 TEST(Ulh_bitextension) { |
1164 CcTest::InitializeVM(); | 1167 CcTest::InitializeVM(); |
1165 | 1168 |
1166 static const int kBufferSize = 300 * KB; | 1169 static const int kBufferSize = 300 * KB; |
1167 char memory_buffer[kBufferSize]; | 1170 char memory_buffer[kBufferSize]; |
1168 char* buffer_middle = memory_buffer + (kBufferSize / 2); | 1171 char* buffer_middle = memory_buffer + (kBufferSize / 2); |
1169 | 1172 |
1170 FOR_UINT64_INPUTS(i, unsigned_test_values) { | 1173 FOR_UINT64_INPUTS(i, unsigned_test_values) { |
1171 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { | 1174 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { |
1172 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { | 1175 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { |
1173 uint16_t value = static_cast<uint64_t>(*i & 0xFFFF); | 1176 uint16_t value = static_cast<uint64_t>(*i & 0xFFFF); |
1174 int32_t in_offset = *j1 + *k1; | 1177 int32_t in_offset = *j1 + *k1; |
1175 int32_t out_offset = *j2 + *k2; | 1178 int32_t out_offset = *j2 + *k2; |
1176 | 1179 |
1177 CHECK_EQ(true, run_Unaligned<uint16_t>( | 1180 auto fn = [](MacroAssembler* masm, int32_t in_offset, |
1178 buffer_middle, in_offset, out_offset, value, | 1181 int32_t out_offset) { |
1179 [](MacroAssembler* masm, int32_t in_offset, | 1182 Label success, fail, end, different; |
1180 int32_t out_offset) { | 1183 __ Ulh(t0, MemOperand(a0, in_offset)); |
1181 Label success, fail, end, different; | 1184 __ Ulhu(t1, MemOperand(a0, in_offset)); |
1182 __ Ulh(t0, MemOperand(a0, in_offset)); | 1185 __ Branch(&different, ne, t0, Operand(t1)); |
1183 __ Ulhu(t1, MemOperand(a0, in_offset)); | |
1184 __ Branch(&different, ne, t0, Operand(t1)); | |
1185 | 1186 |
1186 // If signed and unsigned values are same, check | 1187 // If signed and unsigned values are same, check |
1187 // the upper bits to see if they are zero | 1188 // the upper bits to see if they are zero |
1188 __ sra(t0, t0, 15); | 1189 __ sra(t0, t0, 15); |
1189 __ Branch(&success, eq, t0, Operand(zero_reg)); | 1190 __ Branch(&success, eq, t0, Operand(zero_reg)); |
1190 __ Branch(&fail); | 1191 __ Branch(&fail); |
1191 | 1192 |
1192 // If signed and unsigned values are different, | 1193 // If signed and unsigned values are different, |
1193 // check that the upper bits are complementary | 1194 // check that the upper bits are complementary |
1194 __ bind(&different); | 1195 __ bind(&different); |
1195 __ sra(t1, t1, 15); | 1196 __ sra(t1, t1, 15); |
1196 __ Branch(&fail, ne, t1, Operand(1)); | 1197 __ Branch(&fail, ne, t1, Operand(1)); |
1197 __ sra(t0, t0, 15); | 1198 __ sra(t0, t0, 15); |
1198 __ addiu(t0, t0, 1); | 1199 __ addiu(t0, t0, 1); |
1199 __ Branch(&fail, ne, t0, Operand(zero_reg)); | 1200 __ Branch(&fail, ne, t0, Operand(zero_reg)); |
1200 // Fall through to success | 1201 // Fall through to success |
1201 | 1202 |
1202 __ bind(&success); | 1203 __ bind(&success); |
1203 __ Ulh(t0, MemOperand(a0, in_offset)); | 1204 __ Ulh(t0, MemOperand(a0, in_offset)); |
1204 __ Ush(t0, MemOperand(a0, out_offset), v0); | 1205 __ Ush(t0, MemOperand(a0, out_offset), v0); |
1205 __ Branch(&end); | 1206 __ Branch(&end); |
1206 __ bind(&fail); | 1207 __ bind(&fail); |
1207 __ Ush(zero_reg, MemOperand(a0, out_offset), v0); | 1208 __ Ush(zero_reg, MemOperand(a0, out_offset), v0); |
1208 __ bind(&end); | 1209 __ bind(&end); |
1209 })); | 1210 }; |
| 1211 CHECK_EQ(true, run_Unaligned<uint16_t>(buffer_middle, in_offset, |
| 1212 out_offset, value, fn)); |
1210 } | 1213 } |
1211 } | 1214 } |
1212 } | 1215 } |
1213 } | 1216 } |
1214 | 1217 |
1215 TEST(Ulw) { | 1218 TEST(Ulw) { |
1216 CcTest::InitializeVM(); | 1219 CcTest::InitializeVM(); |
1217 | 1220 |
1218 static const int kBufferSize = 300 * KB; | 1221 static const int kBufferSize = 300 * KB; |
1219 char memory_buffer[kBufferSize]; | 1222 char memory_buffer[kBufferSize]; |
1220 char* buffer_middle = memory_buffer + (kBufferSize / 2); | 1223 char* buffer_middle = memory_buffer + (kBufferSize / 2); |
1221 | 1224 |
1222 FOR_UINT64_INPUTS(i, unsigned_test_values) { | 1225 FOR_UINT64_INPUTS(i, unsigned_test_values) { |
1223 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { | 1226 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { |
1224 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { | 1227 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { |
1225 uint32_t value = static_cast<uint32_t>(*i & 0xFFFFFFFF); | 1228 uint32_t value = static_cast<uint32_t>(*i & 0xFFFFFFFF); |
1226 int32_t in_offset = *j1 + *k1; | 1229 int32_t in_offset = *j1 + *k1; |
1227 int32_t out_offset = *j2 + *k2; | 1230 int32_t out_offset = *j2 + *k2; |
1228 | 1231 |
1229 CHECK_EQ(true, run_Unaligned<uint32_t>( | 1232 auto fn_1 = [](MacroAssembler* masm, int32_t in_offset, |
1230 buffer_middle, in_offset, out_offset, value, | 1233 int32_t out_offset) { |
1231 [](MacroAssembler* masm, int32_t in_offset, | 1234 __ Ulw(v0, MemOperand(a0, in_offset)); |
1232 int32_t out_offset) { | 1235 __ Usw(v0, MemOperand(a0, out_offset)); |
1233 __ Ulw(v0, MemOperand(a0, in_offset)); | 1236 }; |
1234 __ Usw(v0, MemOperand(a0, out_offset)); | 1237 CHECK_EQ(true, run_Unaligned<uint32_t>(buffer_middle, in_offset, |
1235 })); | 1238 out_offset, value, fn_1)); |
| 1239 |
| 1240 auto fn_2 = [](MacroAssembler* masm, int32_t in_offset, |
| 1241 int32_t out_offset) { |
| 1242 __ mov(t0, a0); |
| 1243 __ Ulw(a0, MemOperand(a0, in_offset)); |
| 1244 __ Usw(a0, MemOperand(t0, out_offset)); |
| 1245 }; |
1236 CHECK_EQ(true, | 1246 CHECK_EQ(true, |
1237 run_Unaligned<uint32_t>( | 1247 run_Unaligned<uint32_t>(buffer_middle, in_offset, out_offset, |
1238 buffer_middle, in_offset, out_offset, (uint32_t)value, | 1248 (uint32_t)value, fn_2)); |
1239 [](MacroAssembler* masm, int32_t in_offset, | |
1240 int32_t out_offset) { | |
1241 __ mov(t0, a0); | |
1242 __ Ulw(a0, MemOperand(a0, in_offset)); | |
1243 __ Usw(a0, MemOperand(t0, out_offset)); | |
1244 })); | |
1245 } | 1249 } |
1246 } | 1250 } |
1247 } | 1251 } |
1248 } | 1252 } |
1249 | 1253 |
1250 TEST(Ulwc1) { | 1254 TEST(Ulwc1) { |
1251 CcTest::InitializeVM(); | 1255 CcTest::InitializeVM(); |
1252 | 1256 |
1253 static const int kBufferSize = 300 * KB; | 1257 static const int kBufferSize = 300 * KB; |
1254 char memory_buffer[kBufferSize]; | 1258 char memory_buffer[kBufferSize]; |
1255 char* buffer_middle = memory_buffer + (kBufferSize / 2); | 1259 char* buffer_middle = memory_buffer + (kBufferSize / 2); |
1256 | 1260 |
1257 FOR_UINT64_INPUTS(i, unsigned_test_values) { | 1261 FOR_UINT64_INPUTS(i, unsigned_test_values) { |
1258 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { | 1262 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { |
1259 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { | 1263 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { |
1260 float value = static_cast<float>(*i & 0xFFFFFFFF); | 1264 float value = static_cast<float>(*i & 0xFFFFFFFF); |
1261 int32_t in_offset = *j1 + *k1; | 1265 int32_t in_offset = *j1 + *k1; |
1262 int32_t out_offset = *j2 + *k2; | 1266 int32_t out_offset = *j2 + *k2; |
1263 | 1267 |
1264 CHECK_EQ(true, run_Unaligned<float>( | 1268 auto fn = [](MacroAssembler* masm, int32_t in_offset, |
1265 buffer_middle, in_offset, out_offset, value, | 1269 int32_t out_offset) { |
1266 [](MacroAssembler* masm, int32_t in_offset, | 1270 __ Ulwc1(f0, MemOperand(a0, in_offset), t0); |
1267 int32_t out_offset) { | 1271 __ Uswc1(f0, MemOperand(a0, out_offset), t0); |
1268 __ Ulwc1(f0, MemOperand(a0, in_offset), t0); | 1272 }; |
1269 __ Uswc1(f0, MemOperand(a0, out_offset), t0); | 1273 CHECK_EQ(true, run_Unaligned<float>(buffer_middle, in_offset, |
1270 })); | 1274 out_offset, value, fn)); |
1271 } | 1275 } |
1272 } | 1276 } |
1273 } | 1277 } |
1274 } | 1278 } |
1275 | 1279 |
1276 TEST(Uldc1) { | 1280 TEST(Uldc1) { |
1277 CcTest::InitializeVM(); | 1281 CcTest::InitializeVM(); |
1278 | 1282 |
1279 static const int kBufferSize = 300 * KB; | 1283 static const int kBufferSize = 300 * KB; |
1280 char memory_buffer[kBufferSize]; | 1284 char memory_buffer[kBufferSize]; |
1281 char* buffer_middle = memory_buffer + (kBufferSize / 2); | 1285 char* buffer_middle = memory_buffer + (kBufferSize / 2); |
1282 | 1286 |
1283 FOR_UINT64_INPUTS(i, unsigned_test_values) { | 1287 FOR_UINT64_INPUTS(i, unsigned_test_values) { |
1284 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { | 1288 FOR_INT32_INPUTS2(j1, j2, unsigned_test_offset) { |
1285 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { | 1289 FOR_INT32_INPUTS2(k1, k2, unsigned_test_offset_increment) { |
1286 double value = static_cast<double>(*i); | 1290 double value = static_cast<double>(*i); |
1287 int32_t in_offset = *j1 + *k1; | 1291 int32_t in_offset = *j1 + *k1; |
1288 int32_t out_offset = *j2 + *k2; | 1292 int32_t out_offset = *j2 + *k2; |
1289 | 1293 |
1290 CHECK_EQ(true, run_Unaligned<double>( | 1294 auto fn = [](MacroAssembler* masm, int32_t in_offset, |
1291 buffer_middle, in_offset, out_offset, value, | 1295 int32_t out_offset) { |
1292 [](MacroAssembler* masm, int32_t in_offset, | 1296 __ Uldc1(f0, MemOperand(a0, in_offset), t0); |
1293 int32_t out_offset) { | 1297 __ Usdc1(f0, MemOperand(a0, out_offset), t0); |
1294 __ Uldc1(f0, MemOperand(a0, in_offset), t0); | 1298 }; |
1295 __ Usdc1(f0, MemOperand(a0, out_offset), t0); | 1299 CHECK_EQ(true, run_Unaligned<double>(buffer_middle, in_offset, |
1296 })); | 1300 out_offset, value, fn)); |
1297 } | 1301 } |
1298 } | 1302 } |
1299 } | 1303 } |
1300 } | 1304 } |
1301 | 1305 |
1302 static const std::vector<uint32_t> sltu_test_values() { | 1306 static const std::vector<uint32_t> sltu_test_values() { |
1303 static const uint32_t kValues[] = { | 1307 static const uint32_t kValues[] = { |
1304 0, 1, 0x7ffe, 0x7fff, 0x8000, | 1308 0, 1, 0x7ffe, 0x7fff, 0x8000, |
1305 0x8001, 0xfffe, 0xffff, 0xffff7ffe, 0xffff7fff, | 1309 0x8001, 0xfffe, 0xffff, 0xffff7ffe, 0xffff7fff, |
1306 0xffff8000, 0xffff8001, 0xfffffffe, 0xffffffff, | 1310 0xffff8000, 0xffff8001, 0xfffffffe, 0xffffffff, |
(...skipping 27 matching lines...) |
1334 } | 1338 } |
1335 | 1339 |
1336 TEST(Sltu) { | 1340 TEST(Sltu) { |
1337 CcTest::InitializeVM(); | 1341 CcTest::InitializeVM(); |
1338 | 1342 |
1339 FOR_UINT32_INPUTS(i, sltu_test_values) { | 1343 FOR_UINT32_INPUTS(i, sltu_test_values) { |
1340 FOR_UINT32_INPUTS(j, sltu_test_values) { | 1344 FOR_UINT32_INPUTS(j, sltu_test_values) { |
1341 uint32_t rs = *i; | 1345 uint32_t rs = *i; |
1342 uint32_t rd = *j; | 1346 uint32_t rd = *j; |
1343 | 1347 |
1344 CHECK_EQ(rs < rd, run_Sltu(rs, rd, | 1348 auto fn_1 = [](MacroAssembler* masm, uint32_t imm) { |
1345 [](MacroAssembler* masm, uint32_t imm) { | 1349 __ Sltu(v0, a0, Operand(imm)); |
1346 __ Sltu(v0, a0, Operand(imm)); | 1350 }; |
1347 })); | 1351 CHECK_EQ(rs < rd, run_Sltu(rs, rd, fn_1)); |
1348 CHECK_EQ(rs < rd, | 1352 |
1349 run_Sltu(rs, rd, [](MacroAssembler* masm, | 1353 auto fn_2 = [](MacroAssembler* masm, uint32_t imm) { |
1350 uint32_t imm) { __ Sltu(v0, a0, a1); })); | 1354 __ Sltu(v0, a0, a1); |
| 1355 }; |
| 1356 CHECK_EQ(rs < rd, run_Sltu(rs, rd, fn_2)); |
1351 } | 1357 } |
1352 } | 1358 } |
1353 } | 1359 } |
1354 | 1360 |
1355 #undef __ | 1361 #undef __ |
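
The change applied in every hunk above is the same mechanical refactoring: the code-generator lambda that used to be written inline inside CHECK_EQ is hoisted into a named local (fn, fn_1, ...) and then passed to the run_* helper; the lambda bodies themselves are unchanged, so behavior stays the same and only where the lambda is defined moves. Below is a minimal, self-contained sketch of that pattern using a stand-in helper instead of the real run_Cvt/run_Unaligned (MacroAssembler, CcTest and code generation are omitted, so the helper name, signature and behavior here are illustrative only, not the V8 test harness):

    #include <cassert>

    // Stand-in for the run_* helpers in this file: takes an input value and a
    // code-generator callback. The real helpers assemble and execute MIPS
    // code; here the callback is simply invoked and the input passed through
    // so the example runs on any host.
    template <typename T, typename Func>
    T RunHelper(T input, Func generate_code) {
      generate_code(input);
      return input;
    }

    int main() {
      const int input = 42;

      // Before: the lambda is written inline inside the check expression.
      assert(input == RunHelper(input, [](int /* unused */) {
        // __ cvt_s_w(f0, f4); ...  (emitted instructions in the real test)
      }));

      // After: the lambda is hoisted into a named local first, then passed in.
      auto fn = [](int /* unused */) {
        // __ cvt_s_w(f0, f4); ...
      };
      assert(input == RunHelper(input, fn));

      return 0;
    }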