Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(738)

Side by Side Diff: src/mips64/macro-assembler-mips64.cc

Issue 1534183002: MIPS64: r6 compact branch optimization. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include <limits.h> // For LONG_MIN, LONG_MAX. 5 #include <limits.h> // For LONG_MIN, LONG_MAX.
6 6
7 #if V8_TARGET_ARCH_MIPS64 7 #if V8_TARGET_ARCH_MIPS64
8 8
9 #include "src/base/division-by-constant.h" 9 #include "src/base/division-by-constant.h"
10 #include "src/bootstrapper.h" 10 #include "src/bootstrapper.h"
(...skipping 1811 matching lines...) Expand 10 before | Expand all | Expand 10 after
1822 DCHECK(nan || target); 1822 DCHECK(nan || target);
1823 // Check for unordered (NaN) cases. 1823 // Check for unordered (NaN) cases.
1824 if (nan) { 1824 if (nan) {
1825 bool long_branch = nan->is_bound() ? is_near(nan) : is_trampoline_emitted(); 1825 bool long_branch = nan->is_bound() ? is_near(nan) : is_trampoline_emitted();
1826 if (kArchVariant != kMips64r6) { 1826 if (kArchVariant != kMips64r6) {
1827 if (long_branch) { 1827 if (long_branch) {
1828 Label skip; 1828 Label skip;
1829 c(UN, sizeField, cmp1, cmp2); 1829 c(UN, sizeField, cmp1, cmp2);
1830 bc1f(&skip); 1830 bc1f(&skip);
1831 nop(); 1831 nop();
1832 J(nan, bd); 1832 BranchLong(nan, bd);
1833 bind(&skip); 1833 bind(&skip);
1834 } else { 1834 } else {
1835 c(UN, sizeField, cmp1, cmp2); 1835 c(UN, sizeField, cmp1, cmp2);
1836 bc1t(nan); 1836 bc1t(nan);
1837 if (bd == PROTECT) { 1837 if (bd == PROTECT) {
1838 nop(); 1838 nop();
1839 } 1839 }
1840 } 1840 }
1841 } else { 1841 } else {
1842 // Use kDoubleCompareReg for comparison result. It has to be unavailable 1842 // Use kDoubleCompareReg for comparison result. It has to be unavailable
1843 // to lithium 1843 // to lithium
1844 // register allocator. 1844 // register allocator.
1845 DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg)); 1845 DCHECK(!cmp1.is(kDoubleCompareReg) && !cmp2.is(kDoubleCompareReg));
1846 if (long_branch) { 1846 if (long_branch) {
1847 Label skip; 1847 Label skip;
1848 cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2); 1848 cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2);
1849 bc1eqz(&skip, kDoubleCompareReg); 1849 bc1eqz(&skip, kDoubleCompareReg);
1850 nop(); 1850 nop();
1851 J(nan, bd); 1851 BranchLong(nan, bd);
1852 bind(&skip); 1852 bind(&skip);
1853 } else { 1853 } else {
1854 cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2); 1854 cmp(UN, sizeField, kDoubleCompareReg, cmp1, cmp2);
1855 bc1nez(nan, kDoubleCompareReg); 1855 bc1nez(nan, kDoubleCompareReg);
1856 if (bd == PROTECT) { 1856 if (bd == PROTECT) {
1857 nop(); 1857 nop();
1858 } 1858 }
1859 } 1859 }
1860 } 1860 }
1861 } 1861 }
1862 1862
1863 if (target) { 1863 if (target) {
1864 bool long_branch = 1864 bool long_branch =
1865 target->is_bound() ? is_near(target) : is_trampoline_emitted(); 1865 target->is_bound() ? is_near(target) : is_trampoline_emitted();
1866 if (long_branch) { 1866 if (long_branch) {
1867 Label skip; 1867 Label skip;
1868 Condition neg_cond = NegateFpuCondition(cond); 1868 Condition neg_cond = NegateFpuCondition(cond);
1869 BranchShortF(sizeField, &skip, neg_cond, cmp1, cmp2, bd); 1869 BranchShortF(sizeField, &skip, neg_cond, cmp1, cmp2, bd);
1870 J(target, bd); 1870 BranchLong(target, bd);
1871 bind(&skip); 1871 bind(&skip);
1872 } else { 1872 } else {
1873 BranchShortF(sizeField, target, cond, cmp1, cmp2, bd); 1873 BranchShortF(sizeField, target, cond, cmp1, cmp2, bd);
1874 } 1874 }
1875 } 1875 }
1876 } 1876 }
1877 1877
1878 1878
1879 void MacroAssembler::BranchShortF(SecondaryField sizeField, Label* target, 1879 void MacroAssembler::BranchShortF(SecondaryField sizeField, Label* target,
1880 Condition cc, FPURegister cmp1, 1880 Condition cc, FPURegister cmp1,
(...skipping 398 matching lines...) Expand 10 before | Expand all | Expand 10 after
2279 2279
2280 2280
2281 // Emulated conditional branches do not emit a nop in the branch delay slot. 2281 // Emulated conditional branches do not emit a nop in the branch delay slot.
2282 // 2282 //
2283 // BRANCH_ARGS_CHECK checks that conditional jump arguments are correct. 2283 // BRANCH_ARGS_CHECK checks that conditional jump arguments are correct.
2284 #define BRANCH_ARGS_CHECK(cond, rs, rt) DCHECK( \ 2284 #define BRANCH_ARGS_CHECK(cond, rs, rt) DCHECK( \
2285 (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \ 2285 (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \
2286 (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg)))) 2286 (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg))))
2287 2287
2288 2288
2289 void MacroAssembler::Branch(int16_t offset, BranchDelaySlot bdslot) { 2289 void MacroAssembler::Branch(int32_t offset, BranchDelaySlot bdslot) {
2290 DCHECK(kArchVariant == kMips64r6 ? is_int26(offset) : is_int16(offset));
2290 BranchShort(offset, bdslot); 2291 BranchShort(offset, bdslot);
2291 } 2292 }
2292 2293
2293 2294
2294 void MacroAssembler::Branch(int16_t offset, Condition cond, Register rs, 2295 void MacroAssembler::Branch(int32_t offset, Condition cond, Register rs,
2295 const Operand& rt, 2296 const Operand& rt, BranchDelaySlot bdslot) {
2296 BranchDelaySlot bdslot) { 2297 bool is_near = BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot);
2297 BranchShort(offset, cond, rs, rt, bdslot); 2298 DCHECK(is_near);
2299 USE(is_near);
2298 } 2300 }
2299 2301
2300 2302
2301 void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) { 2303 void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) {
2302 if (L->is_bound()) { 2304 if (L->is_bound()) {
2303 if (is_near(L)) { 2305 if (is_near_branch(L)) {
2304 BranchShort(L, bdslot); 2306 BranchShort(L, bdslot);
2305 } else { 2307 } else {
2306 J(L, bdslot); 2308 BranchLong(L, bdslot);
2307 } 2309 }
2308 } else { 2310 } else {
2309 if (is_trampoline_emitted()) { 2311 if (is_trampoline_emitted()) {
2310 J(L, bdslot); 2312 BranchLong(L, bdslot);
2311 } else { 2313 } else {
2312 BranchShort(L, bdslot); 2314 BranchShort(L, bdslot);
2313 } 2315 }
2314 } 2316 }
2315 } 2317 }
2316 2318
2317 2319
2318 void MacroAssembler::Branch(Label* L, Condition cond, Register rs, 2320 void MacroAssembler::Branch(Label* L, Condition cond, Register rs,
2319 const Operand& rt, 2321 const Operand& rt,
2320 BranchDelaySlot bdslot) { 2322 BranchDelaySlot bdslot) {
2321 if (L->is_bound()) { 2323 if (L->is_bound()) {
2322 if (is_near(L)) { 2324 if (!BranchShortCheck(0, L, cond, rs, rt, bdslot)) {
2323 BranchShort(L, cond, rs, rt, bdslot);
2324 } else {
2325 if (cond != cc_always) { 2325 if (cond != cc_always) {
2326 Label skip; 2326 Label skip;
2327 Condition neg_cond = NegateCondition(cond); 2327 Condition neg_cond = NegateCondition(cond);
2328 BranchShort(&skip, neg_cond, rs, rt); 2328 BranchShort(&skip, neg_cond, rs, rt);
2329 J(L, bdslot); 2329 BranchLong(L, bdslot);
2330 bind(&skip); 2330 bind(&skip);
2331 } else { 2331 } else {
2332 J(L, bdslot); 2332 BranchLong(L, bdslot);
2333 } 2333 }
2334 } 2334 }
2335 } else { 2335 } else {
2336 if (is_trampoline_emitted()) { 2336 if (is_trampoline_emitted()) {
2337 if (cond != cc_always) { 2337 if (cond != cc_always) {
2338 Label skip; 2338 Label skip;
2339 Condition neg_cond = NegateCondition(cond); 2339 Condition neg_cond = NegateCondition(cond);
2340 BranchShort(&skip, neg_cond, rs, rt); 2340 BranchShort(&skip, neg_cond, rs, rt);
2341 J(L, bdslot); 2341 BranchLong(L, bdslot);
2342 bind(&skip); 2342 bind(&skip);
2343 } else { 2343 } else {
2344 J(L, bdslot); 2344 BranchLong(L, bdslot);
2345 } 2345 }
2346 } else { 2346 } else {
2347 BranchShort(L, cond, rs, rt, bdslot); 2347 BranchShort(L, cond, rs, rt, bdslot);
2348 } 2348 }
2349 } 2349 }
2350 } 2350 }
2351 2351
2352 2352
2353 void MacroAssembler::Branch(Label* L, 2353 void MacroAssembler::Branch(Label* L,
2354 Condition cond, 2354 Condition cond,
2355 Register rs, 2355 Register rs,
2356 Heap::RootListIndex index, 2356 Heap::RootListIndex index,
2357 BranchDelaySlot bdslot) { 2357 BranchDelaySlot bdslot) {
2358 LoadRoot(at, index); 2358 LoadRoot(at, index);
2359 Branch(L, cond, rs, Operand(at), bdslot); 2359 Branch(L, cond, rs, Operand(at), bdslot);
2360 } 2360 }
2361 2361
2362 2362
2363 void MacroAssembler::BranchShort(int16_t offset, BranchDelaySlot bdslot) { 2363 void MacroAssembler::BranchShortHelper(int16_t offset, Label* L,
2364 BranchDelaySlot bdslot) {
2365 DCHECK(L == nullptr || offset == 0);
2366 offset = GetOffset(offset, L, OffsetSize::kOffset16);
2364 b(offset); 2367 b(offset);
2365 2368
2366 // Emit a nop in the branch delay slot if required. 2369 // Emit a nop in the branch delay slot if required.
2367 if (bdslot == PROTECT) 2370 if (bdslot == PROTECT)
2368 nop(); 2371 nop();
2369 } 2372 }
2370 2373
2371 2374
2372 void MacroAssembler::BranchShort(int16_t offset, Condition cond, Register rs, 2375 void MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L) {
2373 const Operand& rt, 2376 DCHECK(L == nullptr || offset == 0);
2374 BranchDelaySlot bdslot) { 2377 offset = GetOffset(offset, L, OffsetSize::kOffset26);
2375 BRANCH_ARGS_CHECK(cond, rs, rt); 2378 bc(offset);
2376 DCHECK(!rs.is(zero_reg)); 2379 }
2380
2381
2382 void MacroAssembler::BranchShort(int32_t offset, BranchDelaySlot bdslot) {
2383 if (kArchVariant == kMips64r6 && bdslot == PROTECT) {
2384 DCHECK(is_int26(offset));
2385 BranchShortHelperR6(offset, nullptr);
2386 } else {
2387 DCHECK(is_int16(offset));
2388 BranchShortHelper(offset, nullptr, bdslot);
2389 }
2390 }
2391
2392
2393 void MacroAssembler::BranchShort(Label* L, BranchDelaySlot bdslot) {
2394 if (kArchVariant == kMips64r6 && bdslot == PROTECT) {
2395 BranchShortHelperR6(0, L);
2396 } else {
2397 BranchShortHelper(0, L, bdslot);
2398 }
2399 }
2400
2401
2402 static inline bool IsZero(const Operand& rt) {
2403 if (rt.is_reg()) {
2404 return rt.rm().is(zero_reg);
2405 } else {
2406 return rt.immediate() == 0;
2407 }
2408 }
2409
2410
2411 int32_t MacroAssembler::GetOffset(int32_t offset, Label* L, OffsetSize bits) {
2412 if (L) {
2413 offset = branch_offset_helper(L, bits) >> 2;
2414 } else {
2415 DCHECK(is_intn(offset, bits));
2416 }
2417 return offset;
2418 }
2419
2420
2421 Register MacroAssembler::GetRtAsRegisterHelper(const Operand& rt,
2422 Register scratch) {
2377 Register r2 = no_reg; 2423 Register r2 = no_reg;
2378 Register scratch = at;
2379
2380 if (rt.is_reg()) { 2424 if (rt.is_reg()) {
2381 // NOTE: 'at' can be clobbered by Branch but it is legal to use it as rs or 2425 r2 = rt.rm_;
2382 // rt. 2426 } else {
2427 r2 = scratch;
2428 li(r2, rt);
2429 }
2430
2431 return r2;
2432 }
2433
2434
2435 bool MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L,
2436 Condition cond, Register rs,
2437 const Operand& rt) {
2438 DCHECK(L == nullptr || offset == 0);
2439 Register scratch = rs.is(at) ? t8 : at;
2440 OffsetSize bits = OffsetSize::kOffset16;
2441
2442 // Be careful to always use shifted_branch_offset only just before the
2443 // branch instruction, as the location will be remembered for patching the
2444 // target.
2445 {
2383 BlockTrampolinePoolScope block_trampoline_pool(this); 2446 BlockTrampolinePoolScope block_trampoline_pool(this);
2384 r2 = rt.rm_;
2385 switch (cond) { 2447 switch (cond) {
2386 case cc_always: 2448 case cc_always:
2387 b(offset); 2449 bits = OffsetSize::kOffset26;
2450 if (!is_near(L, bits)) return false;
2451 offset = GetOffset(offset, L, bits);
2452 bc(offset);
2388 break; 2453 break;
2389 case eq: 2454 case eq:
2390 beq(rs, r2, offset); 2455 if (rs.code() == rt.rm_.reg_code) {
2456 // Pre R6 beq is used here to make the code patchable. Otherwise bc
2457 // should be used which has no condition field so is not patchable.
2458 bits = OffsetSize::kOffset16;
2459 if (!is_near(L, bits)) return false;
2460 scratch = GetRtAsRegisterHelper(rt, scratch);
2461 offset = GetOffset(offset, L, bits);
2462 beq(rs, scratch, offset);
2463 nop();
2464 } else if (IsZero(rt)) {
2465 bits = OffsetSize::kOffset21;
2466 if (!is_near(L, bits)) return false;
2467 offset = GetOffset(offset, L, bits);
2468 beqzc(rs, offset);
2469 } else {
2470 // We don't want any other register but scratch clobbered.
2471 bits = OffsetSize::kOffset16;
2472 if (!is_near(L, bits)) return false;
2473 scratch = GetRtAsRegisterHelper(rt, scratch);
2474 offset = GetOffset(offset, L, bits);
2475 beqc(rs, scratch, offset);
2476 }
2391 break; 2477 break;
2392 case ne: 2478 case ne:
2393 bne(rs, r2, offset); 2479 if (rs.code() == rt.rm_.reg_code) {
2394 break; 2480 // Pre R6 bne is used here to make the code patchable. Otherwise we
2481 // should not generate any instruction.
2482 bits = OffsetSize::kOffset16;
2483 if (!is_near(L, bits)) return false;
2484 scratch = GetRtAsRegisterHelper(rt, scratch);
2485 offset = GetOffset(offset, L, bits);
2486 bne(rs, scratch, offset);
2487 nop();
2488 } else if (IsZero(rt)) {
2489 bits = OffsetSize::kOffset21;
2490 if (!is_near(L, bits)) return false;
2491 offset = GetOffset(offset, L, bits);
2492 bnezc(rs, offset);
2493 } else {
2494 // We don't want any other register but scratch clobbered.
2495 bits = OffsetSize::kOffset16;
2496 if (!is_near(L, bits)) return false;
2497 scratch = GetRtAsRegisterHelper(rt, scratch);
2498 offset = GetOffset(offset, L, bits);
2499 bnec(rs, scratch, offset);
2500 }
2501 break;
2502
2395 // Signed comparison. 2503 // Signed comparison.
2396 case greater: 2504 case greater:
2397 if (r2.is(zero_reg)) { 2505 // rs > rt
2398 bgtz(rs, offset); 2506 if (rs.code() == rt.rm_.reg_code) {
2399 } else { 2507 break; // No code needs to be emitted.
2400 slt(scratch, r2, rs); 2508 } else if (rs.is(zero_reg)) {
2401 bne(scratch, zero_reg, offset); 2509 bits = OffsetSize::kOffset16;
2510 if (!is_near(L, bits)) return false;
2511 scratch = GetRtAsRegisterHelper(rt, scratch);
2512 offset = GetOffset(offset, L, bits);
2513 bltzc(scratch, offset);
2514 } else if (IsZero(rt)) {
2515 bits = OffsetSize::kOffset16;
2516 if (!is_near(L, bits)) return false;
2517 offset = GetOffset(offset, L, bits);
2518 bgtzc(rs, offset);
2519 } else {
2520 bits = OffsetSize::kOffset16;
2521 if (!is_near(L, bits)) return false;
2522 scratch = GetRtAsRegisterHelper(rt, scratch);
2523 DCHECK(!rs.is(scratch));
2524 offset = GetOffset(offset, L, bits);
2525 bltc(scratch, rs, offset);
2402 } 2526 }
2403 break; 2527 break;
2404 case greater_equal: 2528 case greater_equal:
2405 if (r2.is(zero_reg)) { 2529 // rs >= rt
2406 bgez(rs, offset); 2530 if (rs.code() == rt.rm_.reg_code) {
2407 } else { 2531 bits = OffsetSize::kOffset26;
2408 slt(scratch, rs, r2); 2532 if (!is_near(L, bits)) return false;
2409 beq(scratch, zero_reg, offset); 2533 offset = GetOffset(offset, L, bits);
2534 bc(offset);
2535 } else if (rs.is(zero_reg)) {
2536 bits = OffsetSize::kOffset16;
2537 if (!is_near(L, bits)) return false;
2538 scratch = GetRtAsRegisterHelper(rt, scratch);
2539 offset = GetOffset(offset, L, bits);
2540 blezc(scratch, offset);
2541 } else if (IsZero(rt)) {
2542 bits = OffsetSize::kOffset16;
2543 if (!is_near(L, bits)) return false;
2544 offset = GetOffset(offset, L, bits);
2545 bgezc(rs, offset);
2546 } else {
2547 bits = OffsetSize::kOffset16;
2548 if (!is_near(L, bits)) return false;
2549 scratch = GetRtAsRegisterHelper(rt, scratch);
2550 DCHECK(!rs.is(scratch));
2551 offset = GetOffset(offset, L, bits);
2552 bgec(rs, scratch, offset);
2410 } 2553 }
2411 break; 2554 break;
2412 case less: 2555 case less:
2413 if (r2.is(zero_reg)) { 2556 // rs < rt
2414 bltz(rs, offset); 2557 if (rs.code() == rt.rm_.reg_code) {
2415 } else { 2558 break; // No code needs to be emitted.
2416 slt(scratch, rs, r2); 2559 } else if (rs.is(zero_reg)) {
2417 bne(scratch, zero_reg, offset); 2560 bits = OffsetSize::kOffset16;
2561 if (!is_near(L, bits)) return false;
2562 scratch = GetRtAsRegisterHelper(rt, scratch);
2563 offset = GetOffset(offset, L, bits);
2564 bgtzc(scratch, offset);
2565 } else if (IsZero(rt)) {
2566 bits = OffsetSize::kOffset16;
2567 if (!is_near(L, bits)) return false;
2568 offset = GetOffset(offset, L, bits);
2569 bltzc(rs, offset);
2570 } else {
2571 bits = OffsetSize::kOffset16;
2572 if (!is_near(L, bits)) return false;
2573 scratch = GetRtAsRegisterHelper(rt, scratch);
2574 DCHECK(!rs.is(scratch));
2575 offset = GetOffset(offset, L, bits);
2576 bltc(rs, scratch, offset);
2418 } 2577 }
2419 break; 2578 break;
2420 case less_equal: 2579 case less_equal:
2421 if (r2.is(zero_reg)) { 2580 // rs <= rt
2422 blez(rs, offset); 2581 if (rs.code() == rt.rm_.reg_code) {
2423 } else { 2582 bits = OffsetSize::kOffset26;
2424 slt(scratch, r2, rs); 2583 if (!is_near(L, bits)) return false;
2425 beq(scratch, zero_reg, offset); 2584 offset = GetOffset(offset, L, bits);
2426 } 2585 bc(offset);
2427 break; 2586 } else if (rs.is(zero_reg)) {
2587 bits = OffsetSize::kOffset16;
2588 if (!is_near(L, bits)) return false;
2589 scratch = GetRtAsRegisterHelper(rt, scratch);
2590 offset = GetOffset(offset, L, bits);
2591 bgezc(scratch, offset);
2592 } else if (IsZero(rt)) {
2593 bits = OffsetSize::kOffset16;
2594 if (!is_near(L, bits)) return false;
2595 offset = GetOffset(offset, L, bits);
2596 blezc(rs, offset);
2597 } else {
2598 bits = OffsetSize::kOffset16;
2599 if (!is_near(L, bits)) return false;
2600 scratch = GetRtAsRegisterHelper(rt, scratch);
2601 DCHECK(!rs.is(scratch));
2602 offset = GetOffset(offset, L, bits);
2603 bgec(scratch, rs, offset);
2604 }
2605 break;
2606
2428 // Unsigned comparison. 2607 // Unsigned comparison.
2429 case Ugreater: 2608 case Ugreater:
2430 if (r2.is(zero_reg)) { 2609 // rs > rt
2431 bne(rs, zero_reg, offset); 2610 if (rs.code() == rt.rm_.reg_code) {
2432 } else { 2611 break; // No code needs to be emitted.
2433 sltu(scratch, r2, rs); 2612 } else if (rs.is(zero_reg)) {
2434 bne(scratch, zero_reg, offset); 2613 bits = OffsetSize::kOffset21;
2614 if (!is_near(L, bits)) return false;
2615 scratch = GetRtAsRegisterHelper(rt, scratch);
2616 offset = GetOffset(offset, L, bits);
2617 bnezc(scratch, offset);
2618 } else if (IsZero(rt)) {
2619 bits = OffsetSize::kOffset21;
2620 if (!is_near(L, bits)) return false;
2621 offset = GetOffset(offset, L, bits);
2622 bnezc(rs, offset);
2623 } else {
2624 bits = OffsetSize::kOffset16;
2625 if (!is_near(L, bits)) return false;
2626 scratch = GetRtAsRegisterHelper(rt, scratch);
2627 DCHECK(!rs.is(scratch));
2628 offset = GetOffset(offset, L, bits);
2629 bltuc(scratch, rs, offset);
2435 } 2630 }
2436 break; 2631 break;
2437 case Ugreater_equal: 2632 case Ugreater_equal:
2438 if (r2.is(zero_reg)) { 2633 // rs >= rt
2439 b(offset); 2634 if (rs.code() == rt.rm_.reg_code) {
2440 } else { 2635 bits = OffsetSize::kOffset26;
2441 sltu(scratch, rs, r2); 2636 if (!is_near(L, bits)) return false;
2442 beq(scratch, zero_reg, offset); 2637 offset = GetOffset(offset, L, bits);
2638 bc(offset);
2639 } else if (rs.is(zero_reg)) {
2640 bits = OffsetSize::kOffset21;
2641 if (!is_near(L, bits)) return false;
2642 scratch = GetRtAsRegisterHelper(rt, scratch);
2643 offset = GetOffset(offset, L, bits);
2644 beqzc(scratch, offset);
2645 } else if (IsZero(rt)) {
2646 bits = OffsetSize::kOffset26;
2647 if (!is_near(L, bits)) return false;
2648 offset = GetOffset(offset, L, bits);
2649 bc(offset);
2650 } else {
2651 bits = OffsetSize::kOffset16;
2652 if (!is_near(L, bits)) return false;
2653 scratch = GetRtAsRegisterHelper(rt, scratch);
2654 DCHECK(!rs.is(scratch));
2655 offset = GetOffset(offset, L, bits);
2656 bgeuc(rs, scratch, offset);
2443 } 2657 }
2444 break; 2658 break;
2445 case Uless: 2659 case Uless:
2446 if (r2.is(zero_reg)) { 2660 // rs < rt
2447 // No code needs to be emitted. 2661 if (rs.code() == rt.rm_.reg_code) {
2448 return; 2662 break; // No code needs to be emitted.
2449 } else { 2663 } else if (rs.is(zero_reg)) {
2450 sltu(scratch, rs, r2); 2664 bits = OffsetSize::kOffset21;
2451 bne(scratch, zero_reg, offset); 2665 if (!is_near(L, bits)) return false;
2666 scratch = GetRtAsRegisterHelper(rt, scratch);
2667 offset = GetOffset(offset, L, bits);
2668 bnezc(scratch, offset);
2669 } else if (IsZero(rt)) {
2670 break; // No code needs to be emitted.
2671 } else {
2672 bits = OffsetSize::kOffset16;
2673 if (!is_near(L, bits)) return false;
2674 scratch = GetRtAsRegisterHelper(rt, scratch);
2675 DCHECK(!rs.is(scratch));
2676 offset = GetOffset(offset, L, bits);
2677 bltuc(rs, scratch, offset);
2452 } 2678 }
2453 break; 2679 break;
2454 case Uless_equal: 2680 case Uless_equal:
2455 if (r2.is(zero_reg)) { 2681 // rs <= rt
2456 beq(rs, zero_reg, offset); 2682 if (rs.code() == rt.rm_.reg_code) {
2457 } else { 2683 bits = OffsetSize::kOffset26;
2458 sltu(scratch, r2, rs); 2684 if (!is_near(L, bits)) return false;
2459 beq(scratch, zero_reg, offset); 2685 offset = GetOffset(offset, L, bits);
2686 bc(offset);
2687 } else if (rs.is(zero_reg)) {
2688 bits = OffsetSize::kOffset26;
2689 if (!is_near(L, bits)) return false;
2690 scratch = GetRtAsRegisterHelper(rt, scratch);
2691 offset = GetOffset(offset, L, bits);
2692 bc(offset);
2693 } else if (IsZero(rt)) {
2694 bits = OffsetSize::kOffset21;
2695 if (!is_near(L, bits)) return false;
2696 offset = GetOffset(offset, L, bits);
2697 beqzc(rs, offset);
2698 } else {
2699 bits = OffsetSize::kOffset16;
2700 if (!is_near(L, bits)) return false;
2701 scratch = GetRtAsRegisterHelper(rt, scratch);
2702 DCHECK(!rs.is(scratch));
2703 offset = GetOffset(offset, L, bits);
2704 bgeuc(scratch, rs, offset);
2460 } 2705 }
2461 break; 2706 break;
2462 default: 2707 default:
2463 UNREACHABLE(); 2708 UNREACHABLE();
2464 } 2709 }
2465 } else { 2710 }
2466 // Be careful to always use shifted_branch_offset only just before the 2711 CheckTrampolinePoolQuick(1);
2467 // branch instruction, as the location will be remembered for patching the 2712 return true;
2468 // target. 2713 }
2714
2715
2716 bool MacroAssembler::BranchShortHelper(int16_t offset, Label* L, Condition cond,
2717 Register rs, const Operand& rt,
2718 BranchDelaySlot bdslot) {
2719 DCHECK(L == nullptr || offset == 0);
2720 if (!is_near(L, OffsetSize::kOffset16)) return false;
2721
2722 Register scratch = at;
2723 int32_t offset32;
2724
2725 // Be careful to always use shifted_branch_offset only just before the
2726 // branch instruction, as the location will be remembered for patching the
2727 // target.
2728 {
2469 BlockTrampolinePoolScope block_trampoline_pool(this); 2729 BlockTrampolinePoolScope block_trampoline_pool(this);
2470 switch (cond) { 2730 switch (cond) {
2471 case cc_always: 2731 case cc_always:
2472 b(offset); 2732 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2733 b(offset32);
2473 break; 2734 break;
2474 case eq: 2735 case eq:
2475 if (rt.imm64_ == 0) { 2736 if (IsZero(rt)) {
2476 beq(rs, zero_reg, offset); 2737 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2738 beq(rs, zero_reg, offset32);
2477 } else { 2739 } else {
2478 // We don't want any other register but scratch clobbered. 2740 // We don't want any other register but scratch clobbered.
2479 DCHECK(!scratch.is(rs)); 2741 scratch = GetRtAsRegisterHelper(rt, scratch);
2480 r2 = scratch; 2742 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2481 li(r2, rt); 2743 beq(rs, scratch, offset32);
2482 beq(rs, r2, offset);
2483 } 2744 }
2484 break; 2745 break;
2485 case ne: 2746 case ne:
2486 if (rt.imm64_ == 0) { 2747 if (IsZero(rt)) {
2487 bne(rs, zero_reg, offset); 2748 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2749 bne(rs, zero_reg, offset32);
2488 } else { 2750 } else {
2489 // We don't want any other register but scratch clobbered. 2751 // We don't want any other register but scratch clobbered.
2490 DCHECK(!scratch.is(rs)); 2752 scratch = GetRtAsRegisterHelper(rt, scratch);
2491 r2 = scratch; 2753 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2492 li(r2, rt); 2754 bne(rs, scratch, offset32);
2493 bne(rs, r2, offset); 2755 }
2494 } 2756 break;
2495 break; 2757
2496 // Signed comparison. 2758 // Signed comparison.
2497 case greater: 2759 case greater:
2498 if (rt.imm64_ == 0) { 2760 if (IsZero(rt)) {
2499 bgtz(rs, offset); 2761 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2500 } else { 2762 bgtz(rs, offset32);
2501 r2 = scratch; 2763 } else {
2502 li(r2, rt); 2764 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
2503 slt(scratch, r2, rs); 2765 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2504 bne(scratch, zero_reg, offset); 2766 bne(scratch, zero_reg, offset32);
2505 } 2767 }
2506 break; 2768 break;
2507 case greater_equal: 2769 case greater_equal:
2508 if (rt.imm64_ == 0) { 2770 if (IsZero(rt)) {
2509 bgez(rs, offset); 2771 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2510 } else if (is_int16(rt.imm64_)) { 2772 bgez(rs, offset32);
2511 slti(scratch, rs, static_cast<int32_t>(rt.imm64_)); 2773 } else {
2512 beq(scratch, zero_reg, offset); 2774 Slt(scratch, rs, rt);
2513 } else { 2775 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2514 r2 = scratch; 2776 beq(scratch, zero_reg, offset32);
2515 li(r2, rt);
2516 slt(scratch, rs, r2);
2517 beq(scratch, zero_reg, offset);
2518 } 2777 }
2519 break; 2778 break;
2520 case less: 2779 case less:
2521 if (rt.imm64_ == 0) { 2780 if (IsZero(rt)) {
2522 bltz(rs, offset); 2781 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2523 } else if (is_int16(rt.imm64_)) { 2782 bltz(rs, offset32);
2524 slti(scratch, rs, static_cast<int32_t>(rt.imm64_)); 2783 } else {
2525 bne(scratch, zero_reg, offset); 2784 Slt(scratch, rs, rt);
2526 } else { 2785 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2527 r2 = scratch; 2786 bne(scratch, zero_reg, offset32);
2528 li(r2, rt);
2529 slt(scratch, rs, r2);
2530 bne(scratch, zero_reg, offset);
2531 } 2787 }
2532 break; 2788 break;
2533 case less_equal: 2789 case less_equal:
2534 if (rt.imm64_ == 0) { 2790 if (IsZero(rt)) {
2535 blez(rs, offset); 2791 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2536 } else { 2792 blez(rs, offset32);
2537 r2 = scratch; 2793 } else {
2538 li(r2, rt); 2794 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
2539 slt(scratch, r2, rs); 2795 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2540 beq(scratch, zero_reg, offset); 2796 beq(scratch, zero_reg, offset32);
2541 } 2797 }
2542 break; 2798 break;
2799
2543 // Unsigned comparison. 2800 // Unsigned comparison.
2544 case Ugreater: 2801 case Ugreater:
2545 if (rt.imm64_ == 0) { 2802 if (IsZero(rt)) {
2546 bne(rs, zero_reg, offset); 2803 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2547 } else { 2804 bne(rs, zero_reg, offset32);
2548 r2 = scratch; 2805 } else {
2549 li(r2, rt); 2806 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
2550 sltu(scratch, r2, rs); 2807 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2551 bne(scratch, zero_reg, offset); 2808 bne(scratch, zero_reg, offset32);
2552 } 2809 }
2553 break; 2810 break;
2554 case Ugreater_equal: 2811 case Ugreater_equal:
2555 if (rt.imm64_ == 0) { 2812 if (IsZero(rt)) {
2556 b(offset); 2813 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2557 } else if (is_int16(rt.imm64_)) { 2814 b(offset32);
2558 sltiu(scratch, rs, static_cast<int32_t>(rt.imm64_)); 2815 } else {
2559 beq(scratch, zero_reg, offset); 2816 Sltu(scratch, rs, rt);
2560 } else { 2817 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2561 r2 = scratch; 2818 beq(scratch, zero_reg, offset32);
2562 li(r2, rt);
2563 sltu(scratch, rs, r2);
2564 beq(scratch, zero_reg, offset);
2565 } 2819 }
2566 break; 2820 break;
2567 case Uless: 2821 case Uless:
2568 if (rt.imm64_ == 0) { 2822 if (IsZero(rt)) {
2569 // No code needs to be emitted. 2823 return true; // No code needs to be emitted.
2570 return; 2824 } else {
2571 } else if (is_int16(rt.imm64_)) { 2825 Sltu(scratch, rs, rt);
2572 sltiu(scratch, rs, static_cast<int32_t>(rt.imm64_)); 2826 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2573 bne(scratch, zero_reg, offset); 2827 bne(scratch, zero_reg, offset32);
2574 } else {
2575 r2 = scratch;
2576 li(r2, rt);
2577 sltu(scratch, rs, r2);
2578 bne(scratch, zero_reg, offset);
2579 } 2828 }
2580 break; 2829 break;
2581 case Uless_equal: 2830 case Uless_equal:
2582 if (rt.imm64_ == 0) { 2831 if (IsZero(rt)) {
2583 beq(rs, zero_reg, offset); 2832 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2584 } else { 2833 beq(rs, zero_reg, offset32);
2585 r2 = scratch; 2834 } else {
2586 li(r2, rt); 2835 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
2587 sltu(scratch, r2, rs); 2836 offset32 = GetOffset(offset, L, OffsetSize::kOffset16);
2588 beq(scratch, zero_reg, offset); 2837 beq(scratch, zero_reg, offset32);
2589 } 2838 }
2590 break; 2839 break;
2591 default: 2840 default:
2592 UNREACHABLE(); 2841 UNREACHABLE();
2593 } 2842 }
2594 } 2843 }
2844
2595 // Emit a nop in the branch delay slot if required. 2845 // Emit a nop in the branch delay slot if required.
2596 if (bdslot == PROTECT) 2846 if (bdslot == PROTECT)
2597 nop(); 2847 nop();
2598 } 2848
2599 2849 return true;
2600 2850 }
2601 void MacroAssembler::BranchShort(Label* L, BranchDelaySlot bdslot) { 2851
2602 // We use branch_offset as an argument for the branch instructions to be sure 2852
2603 // it is called just before generating the branch instruction, as needed. 2853 bool MacroAssembler::BranchShortCheck(int32_t offset, Label* L, Condition cond,
2604 2854 Register rs, const Operand& rt,
2605 b(shifted_branch_offset(L, false)); 2855 BranchDelaySlot bdslot) {
2606 2856 BRANCH_ARGS_CHECK(cond, rs, rt);
2607 // Emit a nop in the branch delay slot if required. 2857
2608 if (bdslot == PROTECT) 2858 if (!L) {
2609 nop(); 2859 if (kArchVariant == kMips64r6 && bdslot == PROTECT) {
2860 DCHECK(is_int26(offset));
2861 return BranchShortHelperR6(offset, nullptr, cond, rs, rt);
2862 } else {
2863 DCHECK(is_int16(offset));
2864 return BranchShortHelper(offset, nullptr, cond, rs, rt, bdslot);
2865 }
2866 } else {
2867 DCHECK(offset == 0);
2868 if (kArchVariant == kMips64r6 && bdslot == PROTECT) {
2869 return BranchShortHelperR6(0, L, cond, rs, rt);
2870 } else {
2871 return BranchShortHelper(0, L, cond, rs, rt, bdslot);
2872 }
2873 }
2874 return false;
2875 }
2876
2877
2878 void MacroAssembler::BranchShort(int32_t offset, Condition cond, Register rs,
2879 const Operand& rt, BranchDelaySlot bdslot) {
2880 BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot);
2610 } 2881 }
2611 2882
2612 2883
2613 void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs, 2884 void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs,
2614 const Operand& rt, 2885 const Operand& rt, BranchDelaySlot bdslot) {
2615 BranchDelaySlot bdslot) { 2886 BranchShortCheck(0, L, cond, rs, rt, bdslot);
2616 BRANCH_ARGS_CHECK(cond, rs, rt); 2887 }
2617 2888
2618 int32_t offset = 0; 2889
2619 Register r2 = no_reg; 2890 void MacroAssembler::BranchAndLink(int32_t offset, BranchDelaySlot bdslot) {
2620 Register scratch = at;
2621 if (rt.is_reg()) {
2622 BlockTrampolinePoolScope block_trampoline_pool(this);
2623 r2 = rt.rm_;
2624 // Be careful to always use shifted_branch_offset only just before the
2625 // branch instruction, as the location will be remembered for patching the
2626 // target.
2627 switch (cond) {
2628 case cc_always:
2629 offset = shifted_branch_offset(L, false);
2630 b(offset);
2631 break;
2632 case eq:
2633 offset = shifted_branch_offset(L, false);
2634 beq(rs, r2, offset);
2635 break;
2636 case ne:
2637 offset = shifted_branch_offset(L, false);
2638 bne(rs, r2, offset);
2639 break;
2640 // Signed comparison.
2641 case greater:
2642 if (r2.is(zero_reg)) {
2643 offset = shifted_branch_offset(L, false);
2644 bgtz(rs, offset);
2645 } else {
2646 slt(scratch, r2, rs);
2647 offset = shifted_branch_offset(L, false);
2648 bne(scratch, zero_reg, offset);
2649 }
2650 break;
2651 case greater_equal:
2652 if (r2.is(zero_reg)) {
2653 offset = shifted_branch_offset(L, false);
2654 bgez(rs, offset);
2655 } else {
2656 slt(scratch, rs, r2);
2657 offset = shifted_branch_offset(L, false);
2658 beq(scratch, zero_reg, offset);
2659 }
2660 break;
2661 case less:
2662 if (r2.is(zero_reg)) {
2663 offset = shifted_branch_offset(L, false);
2664 bltz(rs, offset);
2665 } else {
2666 slt(scratch, rs, r2);
2667 offset = shifted_branch_offset(L, false);
2668 bne(scratch, zero_reg, offset);
2669 }
2670 break;
2671 case less_equal:
2672 if (r2.is(zero_reg)) {
2673 offset = shifted_branch_offset(L, false);
2674 blez(rs, offset);
2675 } else {
2676 slt(scratch, r2, rs);
2677 offset = shifted_branch_offset(L, false);
2678 beq(scratch, zero_reg, offset);
2679 }
2680 break;
2681 // Unsigned comparison.
2682 case Ugreater:
2683 if (r2.is(zero_reg)) {
2684 offset = shifted_branch_offset(L, false);
2685 bne(rs, zero_reg, offset);
2686 } else {
2687 sltu(scratch, r2, rs);
2688 offset = shifted_branch_offset(L, false);
2689 bne(scratch, zero_reg, offset);
2690 }
2691 break;
2692 case Ugreater_equal:
2693 if (r2.is(zero_reg)) {
2694 offset = shifted_branch_offset(L, false);
2695 b(offset);
2696 } else {
2697 sltu(scratch, rs, r2);
2698 offset = shifted_branch_offset(L, false);
2699 beq(scratch, zero_reg, offset);
2700 }
2701 break;
2702 case Uless:
2703 if (r2.is(zero_reg)) {
2704 // No code needs to be emitted.
2705 return;
2706 } else {
2707 sltu(scratch, rs, r2);
2708 offset = shifted_branch_offset(L, false);
2709 bne(scratch, zero_reg, offset);
2710 }
2711 break;
2712 case Uless_equal:
2713 if (r2.is(zero_reg)) {
2714 offset = shifted_branch_offset(L, false);
2715 beq(rs, zero_reg, offset);
2716 } else {
2717 sltu(scratch, r2, rs);
2718 offset = shifted_branch_offset(L, false);
2719 beq(scratch, zero_reg, offset);
2720 }
2721 break;
2722 default:
2723 UNREACHABLE();
2724 }
2725 } else {
2726 // Be careful to always use shifted_branch_offset only just before the
2727 // branch instruction, as the location will be remember for patching the
2728 // target.
2729 BlockTrampolinePoolScope block_trampoline_pool(this);
2730 switch (cond) {
2731 case cc_always:
2732 offset = shifted_branch_offset(L, false);
2733 b(offset);
2734 break;
2735 case eq:
2736 if (rt.imm64_ == 0) {
2737 offset = shifted_branch_offset(L, false);
2738 beq(rs, zero_reg, offset);
2739 } else {
2740 DCHECK(!scratch.is(rs));
2741 r2 = scratch;
2742 li(r2, rt);
2743 offset = shifted_branch_offset(L, false);
2744 beq(rs, r2, offset);
2745 }
2746 break;
2747 case ne:
2748 if (rt.imm64_ == 0) {
2749 offset = shifted_branch_offset(L, false);
2750 bne(rs, zero_reg, offset);
2751 } else {
2752 DCHECK(!scratch.is(rs));
2753 r2 = scratch;
2754 li(r2, rt);
2755 offset = shifted_branch_offset(L, false);
2756 bne(rs, r2, offset);
2757 }
2758 break;
2759 // Signed comparison.
2760 case greater:
2761 if (rt.imm64_ == 0) {
2762 offset = shifted_branch_offset(L, false);
2763 bgtz(rs, offset);
2764 } else {
2765 DCHECK(!scratch.is(rs));
2766 r2 = scratch;
2767 li(r2, rt);
2768 slt(scratch, r2, rs);
2769 offset = shifted_branch_offset(L, false);
2770 bne(scratch, zero_reg, offset);
2771 }
2772 break;
2773 case greater_equal:
2774 if (rt.imm64_ == 0) {
2775 offset = shifted_branch_offset(L, false);
2776 bgez(rs, offset);
2777 } else if (is_int16(rt.imm64_)) {
2778 slti(scratch, rs, static_cast<int32_t>(rt.imm64_));
2779 offset = shifted_branch_offset(L, false);
2780 beq(scratch, zero_reg, offset);
2781 } else {
2782 DCHECK(!scratch.is(rs));
2783 r2 = scratch;
2784 li(r2, rt);
2785 slt(scratch, rs, r2);
2786 offset = shifted_branch_offset(L, false);
2787 beq(scratch, zero_reg, offset);
2788 }
2789 break;
2790 case less:
2791 if (rt.imm64_ == 0) {
2792 offset = shifted_branch_offset(L, false);
2793 bltz(rs, offset);
2794 } else if (is_int16(rt.imm64_)) {
2795 slti(scratch, rs, static_cast<int32_t>(rt.imm64_));
2796 offset = shifted_branch_offset(L, false);
2797 bne(scratch, zero_reg, offset);
2798 } else {
2799 DCHECK(!scratch.is(rs));
2800 r2 = scratch;
2801 li(r2, rt);
2802 slt(scratch, rs, r2);
2803 offset = shifted_branch_offset(L, false);
2804 bne(scratch, zero_reg, offset);
2805 }
2806 break;
2807 case less_equal:
2808 if (rt.imm64_ == 0) {
2809 offset = shifted_branch_offset(L, false);
2810 blez(rs, offset);
2811 } else {
2812 DCHECK(!scratch.is(rs));
2813 r2 = scratch;
2814 li(r2, rt);
2815 slt(scratch, r2, rs);
2816 offset = shifted_branch_offset(L, false);
2817 beq(scratch, zero_reg, offset);
2818 }
2819 break;
2820 // Unsigned comparison.
2821 case Ugreater:
2822 if (rt.imm64_ == 0) {
2823 offset = shifted_branch_offset(L, false);
2824 bne(rs, zero_reg, offset);
2825 } else {
2826 DCHECK(!scratch.is(rs));
2827 r2 = scratch;
2828 li(r2, rt);
2829 sltu(scratch, r2, rs);
2830 offset = shifted_branch_offset(L, false);
2831 bne(scratch, zero_reg, offset);
2832 }
2833 break;
2834 case Ugreater_equal:
2835 if (rt.imm64_ == 0) {
2836 offset = shifted_branch_offset(L, false);
2837 b(offset);
2838 } else if (is_int16(rt.imm64_)) {
2839 sltiu(scratch, rs, static_cast<int32_t>(rt.imm64_));
2840 offset = shifted_branch_offset(L, false);
2841 beq(scratch, zero_reg, offset);
2842 } else {
2843 DCHECK(!scratch.is(rs));
2844 r2 = scratch;
2845 li(r2, rt);
2846 sltu(scratch, rs, r2);
2847 offset = shifted_branch_offset(L, false);
2848 beq(scratch, zero_reg, offset);
2849 }
2850 break;
2851 case Uless:
2852 if (rt.imm64_ == 0) {
2853 // No code needs to be emitted.
2854 return;
2855 } else if (is_int16(rt.imm64_)) {
2856 sltiu(scratch, rs, static_cast<int32_t>(rt.imm64_));
2857 offset = shifted_branch_offset(L, false);
2858 bne(scratch, zero_reg, offset);
2859 } else {
2860 DCHECK(!scratch.is(rs));
2861 r2 = scratch;
2862 li(r2, rt);
2863 sltu(scratch, rs, r2);
2864 offset = shifted_branch_offset(L, false);
2865 bne(scratch, zero_reg, offset);
2866 }
2867 break;
2868 case Uless_equal:
2869 if (rt.imm64_ == 0) {
2870 offset = shifted_branch_offset(L, false);
2871 beq(rs, zero_reg, offset);
2872 } else {
2873 DCHECK(!scratch.is(rs));
2874 r2 = scratch;
2875 li(r2, rt);
2876 sltu(scratch, r2, rs);
2877 offset = shifted_branch_offset(L, false);
2878 beq(scratch, zero_reg, offset);
2879 }
2880 break;
2881 default:
2882 UNREACHABLE();
2883 }
2884 }
2885 // Check that offset could actually hold on an int16_t.
2886 DCHECK(is_int16(offset));
2887 // Emit a nop in the branch delay slot if required.
2888 if (bdslot == PROTECT)
2889 nop();
2890 }
2891
2892
2893 void MacroAssembler::BranchAndLink(int16_t offset, BranchDelaySlot bdslot) {
2894 BranchAndLinkShort(offset, bdslot); 2891 BranchAndLinkShort(offset, bdslot);
2895 } 2892 }
2896 2893
2897 2894
2898 void MacroAssembler::BranchAndLink(int16_t offset, Condition cond, Register rs, 2895 void MacroAssembler::BranchAndLink(int32_t offset, Condition cond, Register rs,
2899 const Operand& rt, 2896 const Operand& rt, BranchDelaySlot bdslot) {
2900 BranchDelaySlot bdslot) { 2897 bool is_near = BranchAndLinkShortCheck(offset, nullptr, cond, rs, rt, bdslot);
2901 BranchAndLinkShort(offset, cond, rs, rt, bdslot); 2898 DCHECK(is_near);
2899 USE(is_near);
2902 } 2900 }
2903 2901
2904 2902
2905 void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) { 2903 void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) {
2906 if (L->is_bound()) { 2904 if (L->is_bound()) {
2907 if (is_near(L)) { 2905 if (is_near_branch(L)) {
2908 BranchAndLinkShort(L, bdslot); 2906 BranchAndLinkShort(L, bdslot);
2909 } else { 2907 } else {
2910 Jal(L, bdslot); 2908 BranchAndLinkLong(L, bdslot);
2911 } 2909 }
2912 } else { 2910 } else {
2913 if (is_trampoline_emitted()) { 2911 if (is_trampoline_emitted()) {
2914 Jal(L, bdslot); 2912 BranchAndLinkLong(L, bdslot);
2915 } else { 2913 } else {
2916 BranchAndLinkShort(L, bdslot); 2914 BranchAndLinkShort(L, bdslot);
2917 } 2915 }
2918 } 2916 }
2919 } 2917 }
2920 2918
2921 2919
2922 void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs, 2920 void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs,
2923 const Operand& rt, 2921 const Operand& rt,
2924 BranchDelaySlot bdslot) { 2922 BranchDelaySlot bdslot) {
2925 if (L->is_bound()) { 2923 if (L->is_bound()) {
2926 if (is_near(L)) { 2924 if (!BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot)) {
2927 BranchAndLinkShort(L, cond, rs, rt, bdslot);
2928 } else {
2929 Label skip; 2925 Label skip;
2930 Condition neg_cond = NegateCondition(cond); 2926 Condition neg_cond = NegateCondition(cond);
2931 BranchShort(&skip, neg_cond, rs, rt); 2927 BranchShort(&skip, neg_cond, rs, rt);
2932 Jal(L, bdslot); 2928 BranchAndLinkLong(L, bdslot);
2933 bind(&skip); 2929 bind(&skip);
2934 } 2930 }
2935 } else { 2931 } else {
2936 if (is_trampoline_emitted()) { 2932 if (is_trampoline_emitted()) {
2937 Label skip; 2933 Label skip;
2938 Condition neg_cond = NegateCondition(cond); 2934 Condition neg_cond = NegateCondition(cond);
2939 BranchShort(&skip, neg_cond, rs, rt); 2935 BranchShort(&skip, neg_cond, rs, rt);
2940 Jal(L, bdslot); 2936 BranchAndLinkLong(L, bdslot);
2941 bind(&skip); 2937 bind(&skip);
2942 } else { 2938 } else {
2943 BranchAndLinkShort(L, cond, rs, rt, bdslot); 2939 BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot);
2944 } 2940 }
2945 } 2941 }
2946 } 2942 }
2947 2943
2948 2944
2949 // We need to use a bgezal or bltzal, but they can't be used directly with the 2945 void MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L,
2950 // slt instructions. We could use sub or add instead but we would miss overflow 2946 BranchDelaySlot bdslot) {
2951 // cases, so we keep slt and add an intermediate third instruction. 2947 DCHECK(L == nullptr || offset == 0);
2952 void MacroAssembler::BranchAndLinkShort(int16_t offset, 2948 offset = GetOffset(offset, L, OffsetSize::kOffset16);
2953 BranchDelaySlot bdslot) {
2954 bal(offset); 2949 bal(offset);
2955 2950
2956 // Emit a nop in the branch delay slot if required. 2951 // Emit a nop in the branch delay slot if required.
2957 if (bdslot == PROTECT) 2952 if (bdslot == PROTECT)
2958 nop(); 2953 nop();
2959 } 2954 }
2960 2955
2961 2956
2962 void MacroAssembler::BranchAndLinkShort(int16_t offset, Condition cond, 2957 void MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L) {
2963 Register rs, const Operand& rt, 2958 DCHECK(L == nullptr || offset == 0);
2959 offset = GetOffset(offset, L, OffsetSize::kOffset26);
2960 balc(offset);
2961 }
2962
2963
2964 void MacroAssembler::BranchAndLinkShort(int32_t offset,
2964 BranchDelaySlot bdslot) { 2965 BranchDelaySlot bdslot) {
2965 BRANCH_ARGS_CHECK(cond, rs, rt); 2966 if (kArchVariant == kMips64r6 && bdslot == PROTECT) {
2966 Register r2 = no_reg; 2967 DCHECK(is_int26(offset));
2967 Register scratch = at; 2968 BranchAndLinkShortHelperR6(offset, nullptr);
2968 2969 } else {
2969 if (rt.is_reg()) { 2970 DCHECK(is_int16(offset));
2970 r2 = rt.rm_; 2971 BranchAndLinkShortHelper(offset, nullptr, bdslot);
2971 } else if (cond != cc_always) { 2972 }
2972 r2 = scratch; 2973 }
2973 li(r2, rt); 2974
2974 } 2975
2975 2976 void MacroAssembler::BranchAndLinkShort(Label* L, BranchDelaySlot bdslot) {
2976 { 2977 if (kArchVariant == kMips64r6 && bdslot == PROTECT) {
2977 BlockTrampolinePoolScope block_trampoline_pool(this); 2978 BranchAndLinkShortHelperR6(0, L);
2978 switch (cond) { 2979 } else {
2979 case cc_always: 2980 BranchAndLinkShortHelper(0, L, bdslot);
2980 bal(offset); 2981 }
2981 break; 2982 }
2982 case eq: 2983
2983 bne(rs, r2, 2); 2984
2984 nop(); 2985 bool MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L,
2985 bal(offset); 2986 Condition cond, Register rs,
2986 break; 2987 const Operand& rt) {
2987 case ne: 2988 DCHECK(L == nullptr || offset == 0);
2988 beq(rs, r2, 2); 2989 Register scratch = rs.is(at) ? t8 : at;
2989 nop(); 2990 OffsetSize bits = OffsetSize::kOffset16;
2990 bal(offset); 2991
2991 break; 2992 BlockTrampolinePoolScope block_trampoline_pool(this);
2992 2993 DCHECK((cond == cc_always && is_int26(offset)) || is_int16(offset));
2993 // Signed comparison. 2994 switch (cond) {
2994 case greater: 2995 case cc_always:
2995 // rs > rt 2996 bits = OffsetSize::kOffset26;
2996 slt(scratch, r2, rs); 2997 if (!is_near(L, bits)) return false;
2997 beq(scratch, zero_reg, 2); 2998 offset = GetOffset(offset, L, bits);
2998 nop(); 2999 balc(offset);
2999 bal(offset); 3000 break;
3000 break; 3001 case eq:
3001 case greater_equal: 3002 if (!is_near(L, bits)) return false;
3002 // rs >= rt 3003 Subu(scratch, rs, rt);
3003 slt(scratch, rs, r2); 3004 offset = GetOffset(offset, L, bits);
3004 bne(scratch, zero_reg, 2); 3005 beqzalc(scratch, offset);
3005 nop(); 3006 break;
3006 bal(offset); 3007 case ne:
3007 break; 3008 if (!is_near(L, bits)) return false;
3008 case less: 3009 Subu(scratch, rs, rt);
3009 // rs < r2 3010 offset = GetOffset(offset, L, bits);
3010 slt(scratch, rs, r2); 3011 bnezalc(scratch, offset);
3011 bne(scratch, zero_reg, 2); 3012 break;
3012 nop(); 3013
3013 bal(offset); 3014 // Signed comparison.
3014 break; 3015 case greater:
3015 case less_equal: 3016 // rs > rt
3016 // rs <= r2 3017 if (rs.code() == rt.rm_.reg_code) {
3017 slt(scratch, r2, rs); 3018 break; // No code needs to be emitted.
3018 bne(scratch, zero_reg, 2); 3019 } else if (rs.is(zero_reg)) {
3019 nop(); 3020 if (!is_near(L, bits)) return false;
3020 bal(offset); 3021 scratch = GetRtAsRegisterHelper(rt, scratch);
3021 break; 3022 offset = GetOffset(offset, L, bits);
3022 3023 bltzalc(scratch, offset);
3023 3024 } else if (IsZero(rt)) {
3024 // Unsigned comparison. 3025 if (!is_near(L, bits)) return false;
3025 case Ugreater: 3026 offset = GetOffset(offset, L, bits);
3026 // rs > rt 3027 bgtzalc(rs, offset);
3027 sltu(scratch, r2, rs); 3028 } else {
3028 beq(scratch, zero_reg, 2); 3029 if (!is_near(L, bits)) return false;
3029 nop(); 3030 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3030 bal(offset); 3031 offset = GetOffset(offset, L, bits);
3031 break; 3032 bnezalc(scratch, offset);
3032 case Ugreater_equal: 3033 }
3033 // rs >= rt 3034 break;
3034 sltu(scratch, rs, r2); 3035 case greater_equal:
3035 bne(scratch, zero_reg, 2); 3036 // rs >= rt
3036 nop(); 3037 if (rs.code() == rt.rm_.reg_code) {
3037 bal(offset); 3038 bits = OffsetSize::kOffset26;
3038 break; 3039 if (!is_near(L, bits)) return false;
3039 case Uless: 3040 offset = GetOffset(offset, L, bits);
3040 // rs < r2 3041 balc(offset);
3041 sltu(scratch, rs, r2); 3042 } else if (rs.is(zero_reg)) {
3042 bne(scratch, zero_reg, 2); 3043 if (!is_near(L, bits)) return false;
3043 nop(); 3044 scratch = GetRtAsRegisterHelper(rt, scratch);
3044 bal(offset); 3045 offset = GetOffset(offset, L, bits);
3045 break; 3046 blezalc(scratch, offset);
3046 case Uless_equal: 3047 } else if (IsZero(rt)) {
3047 // rs <= r2 3048 if (!is_near(L, bits)) return false;
3048 sltu(scratch, r2, rs); 3049 offset = GetOffset(offset, L, bits);
3049 bne(scratch, zero_reg, 2); 3050 bgezalc(rs, offset);
3050 nop(); 3051 } else {
3051 bal(offset); 3052 if (!is_near(L, bits)) return false;
3052 break; 3053 Slt(scratch, rs, rt);
3053 default: 3054 offset = GetOffset(offset, L, bits);
3054 UNREACHABLE(); 3055 beqzalc(scratch, offset);
3055 } 3056 }
3056 } 3057 break;
3058 case less:
3059 // rs < rt
3060 if (rs.code() == rt.rm_.reg_code) {
3061 break; // No code needs to be emitted.
3062 } else if (rs.is(zero_reg)) {
3063 if (!is_near(L, bits)) return false;
3064 scratch = GetRtAsRegisterHelper(rt, scratch);
3065 offset = GetOffset(offset, L, bits);
3066 bgtzalc(scratch, offset);
3067 } else if (IsZero(rt)) {
3068 if (!is_near(L, bits)) return false;
3069 offset = GetOffset(offset, L, bits);
3070 bltzalc(rs, offset);
3071 } else {
3072 if (!is_near(L, bits)) return false;
3073 Slt(scratch, rs, rt);
3074 offset = GetOffset(offset, L, bits);
3075 bnezalc(scratch, offset);
3076 }
3077 break;
3078 case less_equal:
3079 // rs <= r2
3080 if (rs.code() == rt.rm_.reg_code) {
3081 bits = OffsetSize::kOffset26;
3082 if (!is_near(L, bits)) return false;
3083 offset = GetOffset(offset, L, bits);
3084 balc(offset);
3085 } else if (rs.is(zero_reg)) {
3086 if (!is_near(L, bits)) return false;
3087 scratch = GetRtAsRegisterHelper(rt, scratch);
3088 offset = GetOffset(offset, L, bits);
3089 bgezalc(scratch, offset);
3090 } else if (IsZero(rt)) {
3091 if (!is_near(L, bits)) return false;
3092 offset = GetOffset(offset, L, bits);
3093 blezalc(rs, offset);
3094 } else {
3095 if (!is_near(L, bits)) return false;
3096 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3097 offset = GetOffset(offset, L, bits);
3098 beqzalc(scratch, offset);
3099 }
3100 break;
3101
3102
3103 // Unsigned comparison.
3104 case Ugreater:
3105 // rs > r2
3106 if (!is_near(L, bits)) return false;
3107 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3108 offset = GetOffset(offset, L, bits);
3109 bnezalc(scratch, offset);
3110 break;
3111 case Ugreater_equal:
3112 // rs >= r2
3113 if (!is_near(L, bits)) return false;
3114 Sltu(scratch, rs, rt);
3115 offset = GetOffset(offset, L, bits);
3116 beqzalc(scratch, offset);
3117 break;
3118 case Uless:
3119 // rs < r2
3120 if (!is_near(L, bits)) return false;
3121 Sltu(scratch, rs, rt);
3122 offset = GetOffset(offset, L, bits);
3123 bnezalc(scratch, offset);
3124 break;
3125 case Uless_equal:
3126 // rs <= r2
3127 if (!is_near(L, bits)) return false;
3128 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3129 offset = GetOffset(offset, L, bits);
3130 beqzalc(scratch, offset);
3131 break;
3132 default:
3133 UNREACHABLE();
3134 }
3135 return true;
3136 }
3137
3138
3139 // Pre r6 we need to use a bgezal or bltzal, but they can't be used directly
3140 // with the slt instructions. We could use sub or add instead but we would miss
3141 // overflow cases, so we keep slt and add an intermediate third instruction.
3142 bool MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L,
3143 Condition cond, Register rs,
3144 const Operand& rt,
3145 BranchDelaySlot bdslot) {
3146 DCHECK(L == nullptr || offset == 0);
3147 if (!is_near(L, OffsetSize::kOffset16)) return false;
3148
3149 Register scratch = t8;
3150 BlockTrampolinePoolScope block_trampoline_pool(this);
3151
3152 switch (cond) {
3153 case cc_always:
3154 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3155 bal(offset);
3156 break;
3157 case eq:
3158 bne(rs, GetRtAsRegisterHelper(rt, scratch), 2);
3159 nop();
3160 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3161 bal(offset);
3162 break;
3163 case ne:
3164 beq(rs, GetRtAsRegisterHelper(rt, scratch), 2);
3165 nop();
3166 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3167 bal(offset);
3168 break;
3169
3170 // Signed comparison.
3171 case greater:
3172 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3173 addiu(scratch, scratch, -1);
3174 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3175 bgezal(scratch, offset);
3176 break;
3177 case greater_equal:
3178 Slt(scratch, rs, rt);
3179 addiu(scratch, scratch, -1);
3180 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3181 bltzal(scratch, offset);
3182 break;
3183 case less:
3184 Slt(scratch, rs, rt);
3185 addiu(scratch, scratch, -1);
3186 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3187 bgezal(scratch, offset);
3188 break;
3189 case less_equal:
3190 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3191 addiu(scratch, scratch, -1);
3192 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3193 bltzal(scratch, offset);
3194 break;
3195
3196 // Unsigned comparison.
3197 case Ugreater:
3198 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3199 addiu(scratch, scratch, -1);
3200 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3201 bgezal(scratch, offset);
3202 break;
3203 case Ugreater_equal:
3204 Sltu(scratch, rs, rt);
3205 addiu(scratch, scratch, -1);
3206 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3207 bltzal(scratch, offset);
3208 break;
3209 case Uless:
3210 Sltu(scratch, rs, rt);
3211 addiu(scratch, scratch, -1);
3212 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3213 bgezal(scratch, offset);
3214 break;
3215 case Uless_equal:
3216 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs);
3217 addiu(scratch, scratch, -1);
3218 offset = GetOffset(offset, L, OffsetSize::kOffset16);
3219 bltzal(scratch, offset);
3220 break;
3221
3222 default:
3223 UNREACHABLE();
3224 }
3225
3057 // Emit a nop in the branch delay slot if required. 3226 // Emit a nop in the branch delay slot if required.
3058 if (bdslot == PROTECT) 3227 if (bdslot == PROTECT)
3059 nop(); 3228 nop();
3060 } 3229
3061 3230 return true;
3062 3231 }
3063 void MacroAssembler::BranchAndLinkShort(Label* L, BranchDelaySlot bdslot) { 3232
3064 bal(shifted_branch_offset(L, false)); 3233
3065 3234 bool MacroAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L,
3066 // Emit a nop in the branch delay slot if required. 3235 Condition cond, Register rs,
3067 if (bdslot == PROTECT) 3236 const Operand& rt,
3068 nop(); 3237 BranchDelaySlot bdslot) {
3069 }
3070
3071
3072 void MacroAssembler::BranchAndLinkShort(Label* L, Condition cond, Register rs,
3073 const Operand& rt,
3074 BranchDelaySlot bdslot) {
3075 BRANCH_ARGS_CHECK(cond, rs, rt); 3238 BRANCH_ARGS_CHECK(cond, rs, rt);
3076 3239
3077 int32_t offset = 0; 3240 if (!L) {
3078 Register r2 = no_reg; 3241 if (kArchVariant == kMips64r6 && bdslot == PROTECT) {
3079 Register scratch = at; 3242 DCHECK(is_int26(offset));
3080 if (rt.is_reg()) { 3243 return BranchAndLinkShortHelperR6(offset, nullptr, cond, rs, rt);
3081 r2 = rt.rm_; 3244 } else {
3082 } else if (cond != cc_always) { 3245 DCHECK(is_int16(offset));
3083 r2 = scratch; 3246 return BranchAndLinkShortHelper(offset, nullptr, cond, rs, rt, bdslot);
3084 li(r2, rt);
3085 }
3086
3087 {
3088 BlockTrampolinePoolScope block_trampoline_pool(this);
3089 switch (cond) {
3090 case cc_always:
3091 offset = shifted_branch_offset(L, false);
3092 bal(offset);
3093 break;
3094 case eq:
3095 bne(rs, r2, 2);
3096 nop();
3097 offset = shifted_branch_offset(L, false);
3098 bal(offset);
3099 break;
3100 case ne:
3101 beq(rs, r2, 2);
3102 nop();
3103 offset = shifted_branch_offset(L, false);
3104 bal(offset);
3105 break;
3106
3107 // Signed comparison.
3108 case greater:
3109 // rs > rt
3110 slt(scratch, r2, rs);
3111 beq(scratch, zero_reg, 2);
3112 nop();
3113 offset = shifted_branch_offset(L, false);
3114 bal(offset);
3115 break;
3116 case greater_equal:
3117 // rs >= rt
3118 slt(scratch, rs, r2);
3119 bne(scratch, zero_reg, 2);
3120 nop();
3121 offset = shifted_branch_offset(L, false);
3122 bal(offset);
3123 break;
3124 case less:
3125 // rs < r2
3126 slt(scratch, rs, r2);
3127 bne(scratch, zero_reg, 2);
3128 nop();
3129 offset = shifted_branch_offset(L, false);
3130 bal(offset);
3131 break;
3132 case less_equal:
3133 // rs <= r2
3134 slt(scratch, r2, rs);
3135 bne(scratch, zero_reg, 2);
3136 nop();
3137 offset = shifted_branch_offset(L, false);
3138 bal(offset);
3139 break;
3140
3141
3142 // Unsigned comparison.
3143 case Ugreater:
3144 // rs > rt
3145 sltu(scratch, r2, rs);
3146 beq(scratch, zero_reg, 2);
3147 nop();
3148 offset = shifted_branch_offset(L, false);
3149 bal(offset);
3150 break;
3151 case Ugreater_equal:
3152 // rs >= rt
3153 sltu(scratch, rs, r2);
3154 bne(scratch, zero_reg, 2);
3155 nop();
3156 offset = shifted_branch_offset(L, false);
3157 bal(offset);
3158 break;
3159 case Uless:
3160 // rs < r2
3161 sltu(scratch, rs, r2);
3162 bne(scratch, zero_reg, 2);
3163 nop();
3164 offset = shifted_branch_offset(L, false);
3165 bal(offset);
3166 break;
3167 case Uless_equal:
3168 // rs <= r2
3169 sltu(scratch, r2, rs);
3170 bne(scratch, zero_reg, 2);
3171 nop();
3172 offset = shifted_branch_offset(L, false);
3173 bal(offset);
3174 break;
3175
3176 default:
3177 UNREACHABLE();
3178 } 3247 }
3179 } 3248 } else {
3180 // Check that offset could actually hold on an int16_t. 3249 DCHECK(offset == 0);
3181 DCHECK(is_int16(offset)); 3250 if (kArchVariant == kMips64r6 && bdslot == PROTECT) {
3182 3251 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt);
3183 // Emit a nop in the branch delay slot if required. 3252 } else {
3184 if (bdslot == PROTECT) 3253 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot);
3185 nop(); 3254 }
3186 } 3255 }
3187 3256 return false;
3188 3257 }
3258
3259
3189 void MacroAssembler::Jump(Register target, 3260 void MacroAssembler::Jump(Register target,
3190 Condition cond, 3261 Condition cond,
3191 Register rs, 3262 Register rs,
3192 const Operand& rt, 3263 const Operand& rt,
3193 BranchDelaySlot bd) { 3264 BranchDelaySlot bd) {
3194 BlockTrampolinePoolScope block_trampoline_pool(this); 3265 BlockTrampolinePoolScope block_trampoline_pool(this);
3195 if (cond == cc_always) { 3266 if (cond == cc_always) {
3196 jr(target); 3267 jr(target);
3197 } else { 3268 } else {
3198 BRANCH_ARGS_CHECK(cond, rs, rt); 3269 BRANCH_ARGS_CHECK(cond, rs, rt);
(...skipping 66 matching lines...) Expand 10 before | Expand all | Expand 10 after
3265 return size * kInstrSize; 3336 return size * kInstrSize;
3266 } 3337 }
3267 3338
3268 3339
3269 // Note: To call gcc-compiled C code on mips, you must call thru t9. 3340 // Note: To call gcc-compiled C code on mips, you must call thru t9.
3270 void MacroAssembler::Call(Register target, 3341 void MacroAssembler::Call(Register target,
3271 Condition cond, 3342 Condition cond,
3272 Register rs, 3343 Register rs,
3273 const Operand& rt, 3344 const Operand& rt,
3274 BranchDelaySlot bd) { 3345 BranchDelaySlot bd) {
3346 #ifdef DEBUG
3347 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0;
3348 #endif
3349
3275 BlockTrampolinePoolScope block_trampoline_pool(this); 3350 BlockTrampolinePoolScope block_trampoline_pool(this);
3276 Label start; 3351 Label start;
3277 bind(&start); 3352 bind(&start);
3278 if (cond == cc_always) { 3353 if (cond == cc_always) {
3279 jalr(target); 3354 jalr(target);
3280 } else { 3355 } else {
3281 BRANCH_ARGS_CHECK(cond, rs, rt); 3356 BRANCH_ARGS_CHECK(cond, rs, rt);
3282 Branch(2, NegateCondition(cond), rs, rt); 3357 Branch(2, NegateCondition(cond), rs, rt);
3283 jalr(target); 3358 jalr(target);
3284 } 3359 }
3285 // Emit a nop in the branch delay slot if required. 3360 // Emit a nop in the branch delay slot if required.
3286 if (bd == PROTECT) 3361 if (bd == PROTECT)
3287 nop(); 3362 nop();
3288 3363
3289 DCHECK_EQ(CallSize(target, cond, rs, rt, bd), 3364 #ifdef DEBUG
3290 SizeOfCodeGeneratedSince(&start)); 3365 CHECK_EQ(size + CallSize(target, cond, rs, rt, bd),
3366 SizeOfCodeGeneratedSince(&start));
3367 #endif
3291 } 3368 }
3292 3369
3293 3370
3294 int MacroAssembler::CallSize(Address target, 3371 int MacroAssembler::CallSize(Address target,
3295 RelocInfo::Mode rmode, 3372 RelocInfo::Mode rmode,
3296 Condition cond, 3373 Condition cond,
3297 Register rs, 3374 Register rs,
3298 const Operand& rt, 3375 const Operand& rt,
3299 BranchDelaySlot bd) { 3376 BranchDelaySlot bd) {
3300 int size = CallSize(t9, cond, rs, rt, bd); 3377 int size = CallSize(t9, cond, rs, rt, bd);
(...skipping 57 matching lines...) Expand 10 before | Expand all | Expand 10 after
3358 3435
3359 3436
3360 void MacroAssembler::Ret(Condition cond, 3437 void MacroAssembler::Ret(Condition cond,
3361 Register rs, 3438 Register rs,
3362 const Operand& rt, 3439 const Operand& rt,
3363 BranchDelaySlot bd) { 3440 BranchDelaySlot bd) {
3364 Jump(ra, cond, rs, rt, bd); 3441 Jump(ra, cond, rs, rt, bd);
3365 } 3442 }
3366 3443
3367 3444
3368 void MacroAssembler::J(Label* L, BranchDelaySlot bdslot) { 3445 void MacroAssembler::BranchLong(Label* L, BranchDelaySlot bdslot) {
3369 BlockTrampolinePoolScope block_trampoline_pool(this); 3446 if (kArchVariant == kMips64r6 && bdslot == PROTECT &&
3370 { 3447 (!L->is_bound() || is_near_r6(L))) {
3371 BlockGrowBufferScope block_buf_growth(this); 3448 BranchShortHelperR6(0, L);
3372 // Buffer growth (and relocation) must be blocked for internal references 3449 } else {
3373 // until associated instructions are emitted and available to be patched. 3450 EmitPendingInstructions();
3374 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); 3451 BlockTrampolinePoolScope block_trampoline_pool(this);
3375 j(L); 3452 {
3453 BlockGrowBufferScope block_buf_growth(this);
3454 // Buffer growth (and relocation) must be blocked for internal references
3455 // until associated instructions are emitted and available to be patched.
3456 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
3457 j(L);
3458 }
3459 // Emit a nop in the branch delay slot if required.
3460 if (bdslot == PROTECT) nop();
3376 } 3461 }
3377 // Emit a nop in the branch delay slot if required.
3378 if (bdslot == PROTECT) nop();
3379 } 3462 }
3380 3463
3381 3464
3382 void MacroAssembler::Jal(Label* L, BranchDelaySlot bdslot) { 3465 void MacroAssembler::BranchAndLinkLong(Label* L, BranchDelaySlot bdslot) {
3383 BlockTrampolinePoolScope block_trampoline_pool(this); 3466 if (kArchVariant == kMips64r6 && bdslot == PROTECT &&
3384 { 3467 (!L->is_bound() || is_near_r6(L))) {
3385 BlockGrowBufferScope block_buf_growth(this); 3468 BranchAndLinkShortHelperR6(0, L);
3386 // Buffer growth (and relocation) must be blocked for internal references 3469 } else {
3387 // until associated instructions are emitted and available to be patched. 3470 EmitPendingInstructions();
3388 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); 3471 BlockTrampolinePoolScope block_trampoline_pool(this);
3389 jal(L); 3472 {
3473 BlockGrowBufferScope block_buf_growth(this);
3474 // Buffer growth (and relocation) must be blocked for internal references
3475 // until associated instructions are emitted and available to be patched.
3476 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
3477 jal(L);
3478 }
3479 // Emit a nop in the branch delay slot if required.
3480 if (bdslot == PROTECT) nop();
3390 } 3481 }
3391 // Emit a nop in the branch delay slot if required.
3392 if (bdslot == PROTECT) nop();
3393 } 3482 }
3394 3483
3395 3484
3396 void MacroAssembler::Jr(Label* L, BranchDelaySlot bdslot) { 3485 void MacroAssembler::Jr(Label* L, BranchDelaySlot bdslot) {
3397 BlockTrampolinePoolScope block_trampoline_pool(this); 3486 BlockTrampolinePoolScope block_trampoline_pool(this);
3398 3487
3399 uint64_t imm64; 3488 uint64_t imm64;
3400 imm64 = jump_address(L); 3489 imm64 = jump_address(L);
3401 { BlockGrowBufferScope block_buf_growth(this); 3490 { BlockGrowBufferScope block_buf_growth(this);
3402 // Buffer growth (and relocation) must be blocked for internal references 3491 // Buffer growth (and relocation) must be blocked for internal references
(...skipping 2869 matching lines...) Expand 10 before | Expand all | Expand 10 after
6272 void CodePatcher::Emit(Instr instr) { 6361 void CodePatcher::Emit(Instr instr) {
6273 masm()->emit(instr); 6362 masm()->emit(instr);
6274 } 6363 }
6275 6364
6276 6365
6277 void CodePatcher::Emit(Address addr) { 6366 void CodePatcher::Emit(Address addr) {
6278 // masm()->emit(reinterpret_cast<Instr>(addr)); 6367 // masm()->emit(reinterpret_cast<Instr>(addr));
6279 } 6368 }
6280 6369
6281 6370
6282 void CodePatcher::ChangeBranchCondition(Condition cond) { 6371 void CodePatcher::ChangeBranchCondition(Instr current_instr,
6283 Instr instr = Assembler::instr_at(masm_.pc_); 6372 uint32_t new_opcode) {
6284 DCHECK(Assembler::IsBranch(instr)); 6373 current_instr = (current_instr & ~kOpcodeMask) | new_opcode;
6285 uint32_t opcode = Assembler::GetOpcodeField(instr); 6374 masm_.emit(current_instr);
6286 // Currently only the 'eq' and 'ne' cond values are supported and the simple
6287 // branch instructions (with opcode being the branch type).
6288 // There are some special cases (see Assembler::IsBranch()) so extending this
6289 // would be tricky.
6290 DCHECK(opcode == BEQ ||
6291 opcode == BNE ||
6292 opcode == BLEZ ||
6293 opcode == BGTZ ||
6294 opcode == BEQL ||
6295 opcode == BNEL ||
6296 opcode == BLEZL ||
6297 opcode == BGTZL);
6298 opcode = (cond == eq) ? BEQ : BNE;
6299 instr = (instr & ~kOpcodeMask) | opcode;
6300 masm_.emit(instr);
6301 } 6375 }
6302 6376
6303 6377
6304 void MacroAssembler::TruncatingDiv(Register result, 6378 void MacroAssembler::TruncatingDiv(Register result,
6305 Register dividend, 6379 Register dividend,
6306 int32_t divisor) { 6380 int32_t divisor) {
6307 DCHECK(!dividend.is(result)); 6381 DCHECK(!dividend.is(result));
6308 DCHECK(!dividend.is(at)); 6382 DCHECK(!dividend.is(at));
6309 DCHECK(!result.is(at)); 6383 DCHECK(!result.is(at));
6310 base::MagicNumbersForDivision<uint32_t> mag = 6384 base::MagicNumbersForDivision<uint32_t> mag =
(...skipping 10 matching lines...) Expand all
6321 if (mag.shift > 0) sra(result, result, mag.shift); 6395 if (mag.shift > 0) sra(result, result, mag.shift);
6322 srl(at, dividend, 31); 6396 srl(at, dividend, 31);
6323 Addu(result, result, Operand(at)); 6397 Addu(result, result, Operand(at));
6324 } 6398 }
6325 6399
6326 6400
6327 } // namespace internal 6401 } // namespace internal
6328 } // namespace v8 6402 } // namespace v8
6329 6403
6330 #endif // V8_TARGET_ARCH_MIPS64 6404 #endif // V8_TARGET_ARCH_MIPS64
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698