OLD | NEW |
1 | 1 |
2 // Copyright 2012 the V8 project authors. All rights reserved. | 2 // Copyright 2012 the V8 project authors. All rights reserved. |
3 // Use of this source code is governed by a BSD-style license that can be | 3 // Use of this source code is governed by a BSD-style license that can be |
4 // found in the LICENSE file. | 4 // found in the LICENSE file. |
5 | 5 |
6 #include <limits.h> // For LONG_MIN, LONG_MAX. | 6 #include <limits.h> // For LONG_MIN, LONG_MAX. |
7 | 7 |
8 #if V8_TARGET_ARCH_MIPS | 8 #if V8_TARGET_ARCH_MIPS |
9 | 9 |
10 #include "src/base/bits.h" | 10 #include "src/base/bits.h" |
(...skipping 1920 matching lines...)
1931 | 1931 |
1932 | 1932 |
1933 // Emulated conditional branches do not emit a nop in the branch delay slot. | 1933 // Emulated conditional branches do not emit a nop in the branch delay slot. |
1934 // | 1934 // |
1935 // BRANCH_ARGS_CHECK checks that conditional jump arguments are correct. | 1935 // BRANCH_ARGS_CHECK checks that conditional jump arguments are correct. |
1936 #define BRANCH_ARGS_CHECK(cond, rs, rt) DCHECK( \ | 1936 #define BRANCH_ARGS_CHECK(cond, rs, rt) DCHECK( \ |
1937 (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \ | 1937 (cond == cc_always && rs.is(zero_reg) && rt.rm().is(zero_reg)) || \ |
1938 (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg)))) | 1938 (cond != cc_always && (!rs.is(zero_reg) || !rt.rm().is(zero_reg)))) |
1939 | 1939 |
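A minimal usage sketch, not part of this patch (it assumes the usual `__` ACCESS_MASM shorthand, a Label named target, and arbitrary argument registers): BRANCH_ARGS_CHECK accepts an unconditional branch only with zero_reg in both operand slots, and a conditional branch only when at least one operand is something other than zero_reg.

    __ Branch(&target, cc_always, zero_reg, Operand(zero_reg));  // ok: unconditional form
    __ Branch(&target, eq, a0, Operand(zero_reg));               // ok: a0 is not zero_reg
    // __ Branch(&target, eq, zero_reg, Operand(zero_reg));      // would fail the DCHECK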
1940 | 1940 |
1941 void MacroAssembler::Branch(int16_t offset, BranchDelaySlot bdslot) { | 1941 void MacroAssembler::Branch(int32_t offset, BranchDelaySlot bdslot) { |
| 1942 DCHECK(IsMipsArchVariant(kMips32r6) ? is_int26(offset) : is_int16(offset)); |
1942 BranchShort(offset, bdslot); | 1943 BranchShort(offset, bdslot); |
1943 } | 1944 } |
1944 | 1945 |
1945 | 1946 |
1946 void MacroAssembler::Branch(int16_t offset, Condition cond, Register rs, | 1947 void MacroAssembler::Branch(int32_t offset, Condition cond, Register rs, |
1947 const Operand& rt, | 1948 const Operand& rt, BranchDelaySlot bdslot) { |
1948 BranchDelaySlot bdslot) { | 1949 bool is_near = BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot); |
1949 BranchShort(offset, cond, rs, rt, bdslot); | 1950 DCHECK(is_near); |
| 1951 USE(is_near); |
1950 } | 1952 } |
1951 | 1953 |
1952 | 1954 |
1953 void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) { | 1955 void MacroAssembler::Branch(Label* L, BranchDelaySlot bdslot) { |
1954 if (L->is_bound()) { | 1956 if (L->is_bound()) { |
1955 if (is_near(L)) { | 1957 if (is_near_branch(L)) { |
1956 BranchShort(L, bdslot); | 1958 BranchShort(L, bdslot); |
1957 } else { | 1959 } else { |
1958 Jr(L, bdslot); | 1960 Jr(L, bdslot); |
1959 } | 1961 } |
1960 } else { | 1962 } else { |
1961 if (is_trampoline_emitted()) { | 1963 if (is_trampoline_emitted()) { |
1962 Jr(L, bdslot); | 1964 Jr(L, bdslot); |
1963 } else { | 1965 } else { |
1964 BranchShort(L, bdslot); | 1966 BranchShort(L, bdslot); |
1965 } | 1967 } |
1966 } | 1968 } |
1967 } | 1969 } |
1968 | 1970 |
1969 | 1971 |
1970 void MacroAssembler::Branch(Label* L, Condition cond, Register rs, | 1972 void MacroAssembler::Branch(Label* L, Condition cond, Register rs, |
1971 const Operand& rt, | 1973 const Operand& rt, |
1972 BranchDelaySlot bdslot) { | 1974 BranchDelaySlot bdslot) { |
1973 if (L->is_bound()) { | 1975 if (L->is_bound()) { |
1974 if (is_near(L)) { | 1976 if (!BranchShortCheck(0, L, cond, rs, rt, bdslot)) { |
1975 BranchShort(L, cond, rs, rt, bdslot); | |
1976 } else { | |
1977 if (cond != cc_always) { | 1977 if (cond != cc_always) { |
1978 Label skip; | 1978 Label skip; |
1979 Condition neg_cond = NegateCondition(cond); | 1979 Condition neg_cond = NegateCondition(cond); |
1980 BranchShort(&skip, neg_cond, rs, rt); | 1980 BranchShort(&skip, neg_cond, rs, rt); |
1981 Jr(L, bdslot); | 1981 Jr(L, bdslot); |
1982 bind(&skip); | 1982 bind(&skip); |
1983 } else { | 1983 } else { |
1984 Jr(L, bdslot); | 1984 Jr(L, bdslot); |
1985 } | 1985 } |
1986 } | 1986 } |
(...skipping 18 matching lines...)
2005 void MacroAssembler::Branch(Label* L, | 2005 void MacroAssembler::Branch(Label* L, |
2006 Condition cond, | 2006 Condition cond, |
2007 Register rs, | 2007 Register rs, |
2008 Heap::RootListIndex index, | 2008 Heap::RootListIndex index, |
2009 BranchDelaySlot bdslot) { | 2009 BranchDelaySlot bdslot) { |
2010 LoadRoot(at, index); | 2010 LoadRoot(at, index); |
2011 Branch(L, cond, rs, Operand(at), bdslot); | 2011 Branch(L, cond, rs, Operand(at), bdslot); |
2012 } | 2012 } |
2013 | 2013 |
2014 | 2014 |
2015 void MacroAssembler::BranchShort(int16_t offset, BranchDelaySlot bdslot) { | 2015 void MacroAssembler::BranchShortHelper(int16_t offset, Label* L, |
| 2016 BranchDelaySlot bdslot) { |
| 2017 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
2016 b(offset); | 2018 b(offset); |
2017 | 2019 |
2018 // Emit a nop in the branch delay slot if required. | 2020 // Emit a nop in the branch delay slot if required. |
2019 if (bdslot == PROTECT) | 2021 if (bdslot == PROTECT) |
2020 nop(); | 2022 nop(); |
2021 } | 2023 } |
2022 | 2024 |
2023 | 2025 |
2024 void MacroAssembler::BranchShort(int16_t offset, Condition cond, Register rs, | 2026 void MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L) { |
2025 const Operand& rt, | 2027 offset = GetOffset(offset, L, OffsetSize::kOffset26); |
2026 BranchDelaySlot bdslot) { | 2028 bc(offset); |
2027 BRANCH_ARGS_CHECK(cond, rs, rt); | 2029 } |
2028 DCHECK(!rs.is(zero_reg)); | 2030 |
| 2031 |
| 2032 void MacroAssembler::BranchShort(int32_t offset, BranchDelaySlot bdslot) { |
| 2033 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
| 2034 DCHECK(is_int26(offset)); |
| 2035 BranchShortHelperR6(offset, nullptr); |
| 2036 } else { |
| 2037 DCHECK(is_int16(offset)); |
| 2038 BranchShortHelper(offset, nullptr, bdslot); |
| 2039 } |
| 2040 } |
| 2041 |
| 2042 |
| 2043 void MacroAssembler::BranchShort(Label* L, BranchDelaySlot bdslot) { |
| 2044 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
| 2045 BranchShortHelperR6(0, L); |
| 2046 } else { |
| 2047 BranchShortHelper(0, L, bdslot); |
| 2048 } |
| 2049 } |
| 2050 |
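An explanatory aside, not from the patch: the recurring `IsMipsArchVariant(kMips32r6) && bdslot == PROTECT` test reflects that the r6 compact branches (bc, beqzc and friends) have no delay slot, so they can only replace a branch whose delay slot would otherwise hold a nop. A caller that wants to fill the slot itself keeps the classic encoding, e.g. (label and register choice arbitrary, `__` is the usual ACCESS_MASM shorthand):

    __ Branch(&done, USE_DELAY_SLOT);  // classic b, no trailing nop is emitted
    __ addiu(v0, zero_reg, 1);         // executes in the branch delay slot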
| 2051 |
| 2052 static inline bool IsZero(const Operand& rt) { |
| 2053 if (rt.is_reg()) { |
| 2054 return rt.rm().is(zero_reg); |
| 2055 } else { |
| 2056 return rt.immediate() == 0; |
| 2057 } |
| 2058 } |
| 2059 |
| 2060 |
| 2061 int32_t MacroAssembler::GetOffset(int32_t offset, Label* L, OffsetSize bits) { |
| 2062 if (L) { |
| 2063 offset = branch_offset_helper(L, bits) >> 2; |
| 2064 } else { |
| 2065 DCHECK(is_intn(offset, bits)); |
| 2066 } |
| 2067 return offset; |
| 2068 } |
| 2069 |
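GetOffset returns the branch offset in instruction words: either the label distance from branch_offset_helper shifted right by 2, or the caller-supplied word count checked against the field width. As a rough guide to the reach of each field width (a sketch assuming kOffset16/kOffset21/kOffset26 name the signed bit-width of the encoded offset):

    #include <cstdint>

    // Hypothetical helper, not in the patch: byte reach of a branch whose encoded
    // offset is a signed 'bits'-wide count of 4-byte instructions.
    constexpr int64_t BranchReachBytes(int bits) {
      return (int64_t{1} << (bits - 1)) * 4;
    }
    // BranchReachBytes(16) == 128 KiB, BranchReachBytes(21) == 4 MiB,
    // BranchReachBytes(26) == 128 MiB, roughly symmetric around the branch.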
| 2070 |
| 2071 Register MacroAssembler::GetRtAsRegisterHelper(const Operand& rt, |
| 2072 Register scratch) { |
2029 Register r2 = no_reg; | 2073 Register r2 = no_reg; |
| 2074 if (rt.is_reg()) { |
| 2075 r2 = rt.rm_; |
| 2076 } else { |
| 2077 r2 = scratch; |
| 2078 li(r2, rt); |
| 2079 } |
| 2080 |
| 2081 return r2; |
| 2082 } |
| 2083 |
| 2084 |
| 2085 bool MacroAssembler::BranchShortHelperR6(int32_t offset, Label* L, |
| 2086 Condition cond, Register rs, |
| 2087 const Operand& rt) { |
| 2088 Register scratch = rs.is(at) ? t8 : at; |
| 2089 OffsetSize bits = OffsetSize::kOffset16; |
| 2090 |
| 2091 // Be careful to always use shifted_branch_offset only just before the |
| 2092 // branch instruction, as the location will be remembered for patching the |
| 2093 // target. |
| 2094 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 2095 switch (cond) { |
| 2096 case cc_always: |
| 2097 bits = OffsetSize::kOffset26; |
| 2098 if (!is_near(L, bits)) return false; |
| 2099 offset = GetOffset(offset, L, bits); |
| 2100 bc(offset); |
| 2101 break; |
| 2102 case eq: |
| 2103 if (rs.code() == rt.rm_.reg_code) { |
| 2104 // Pre R6 beq is used here to make the code patchable. Otherwise bc |
| 2105 // should be used which has no condition field so is not patchable. |
| 2106 bits = OffsetSize::kOffset16; |
| 2107 if (!is_near(L, bits)) return false; |
| 2108 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2109 offset = GetOffset(offset, L, bits); |
| 2110 beq(rs, scratch, offset); |
| 2111 nop(); |
| 2112 } else if (IsZero(rt)) { |
| 2113 bits = OffsetSize::kOffset21; |
| 2114 if (!is_near(L, bits)) return false; |
| 2115 offset = GetOffset(offset, L, bits); |
| 2116 beqzc(rs, offset); |
| 2117 } else { |
| 2118 // We don't want any other register but scratch clobbered. |
| 2119 bits = OffsetSize::kOffset16; |
| 2120 if (!is_near(L, bits)) return false; |
| 2121 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2122 offset = GetOffset(offset, L, bits); |
| 2123 beqc(rs, scratch, offset); |
| 2124 } |
| 2125 break; |
| 2126 case ne: |
| 2127 if (rs.code() == rt.rm_.reg_code) { |
| 2128 // Pre R6 bne is used here to make the code patchable. Otherwise we |
| 2129 // should not generate any instruction. |
| 2130 bits = OffsetSize::kOffset16; |
| 2131 if (!is_near(L, bits)) return false; |
| 2132 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2133 offset = GetOffset(offset, L, bits); |
| 2134 bne(rs, scratch, offset); |
| 2135 nop(); |
| 2136 } else if (IsZero(rt)) { |
| 2137 bits = OffsetSize::kOffset21; |
| 2138 if (!is_near(L, bits)) return false; |
| 2139 offset = GetOffset(offset, L, bits); |
| 2140 bnezc(rs, offset); |
| 2141 } else { |
| 2142 // We don't want any other register but scratch clobbered. |
| 2143 bits = OffsetSize::kOffset16; |
| 2144 if (!is_near(L, bits)) return false; |
| 2145 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2146 offset = GetOffset(offset, L, bits); |
| 2147 bnec(rs, scratch, offset); |
| 2148 } |
| 2149 break; |
| 2150 |
| 2151 // Signed comparison. |
| 2152 case greater: |
| 2153 // rs > rt |
| 2154 if (rs.code() == rt.rm_.reg_code) { |
| 2155 break; // No code needs to be emitted. |
| 2156 } else if (rs.is(zero_reg)) { |
| 2157 bits = OffsetSize::kOffset16; |
| 2158 if (!is_near(L, bits)) return false; |
| 2159 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2160 offset = GetOffset(offset, L, bits); |
| 2161 bltzc(scratch, offset); |
| 2162 } else if (IsZero(rt)) { |
| 2163 bits = OffsetSize::kOffset16; |
| 2164 if (!is_near(L, bits)) return false; |
| 2165 offset = GetOffset(offset, L, bits); |
| 2166 bgtzc(rs, offset); |
| 2167 } else { |
| 2168 bits = OffsetSize::kOffset16; |
| 2169 if (!is_near(L, bits)) return false; |
| 2170 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2171 DCHECK(!rs.is(scratch)); |
| 2172 offset = GetOffset(offset, L, bits); |
| 2173 bltc(scratch, rs, offset); |
| 2174 } |
| 2175 break; |
| 2176 case greater_equal: |
| 2177 // rs >= rt |
| 2178 if (rs.code() == rt.rm_.reg_code) { |
| 2179 bits = OffsetSize::kOffset26; |
| 2180 if (!is_near(L, bits)) return false; |
| 2181 offset = GetOffset(offset, L, bits); |
| 2182 bc(offset); |
| 2183 } else if (rs.is(zero_reg)) { |
| 2184 bits = OffsetSize::kOffset16; |
| 2185 if (!is_near(L, bits)) return false; |
| 2186 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2187 offset = GetOffset(offset, L, bits); |
| 2188 blezc(scratch, offset); |
| 2189 } else if (IsZero(rt)) { |
| 2190 bits = OffsetSize::kOffset16; |
| 2191 if (!is_near(L, bits)) return false; |
| 2192 offset = GetOffset(offset, L, bits); |
| 2193 bgezc(rs, offset); |
| 2194 } else { |
| 2195 bits = OffsetSize::kOffset16; |
| 2196 if (!is_near(L, bits)) return false; |
| 2197 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2198 DCHECK(!rs.is(scratch)); |
| 2199 offset = GetOffset(offset, L, bits); |
| 2200 bgec(rs, scratch, offset); |
| 2201 } |
| 2202 break; |
| 2203 case less: |
| 2204 // rs < rt |
| 2205 if (rs.code() == rt.rm_.reg_code) { |
| 2206 break; // No code needs to be emitted. |
| 2207 } else if (rs.is(zero_reg)) { |
| 2208 bits = OffsetSize::kOffset16; |
| 2209 if (!is_near(L, bits)) return false; |
| 2210 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2211 offset = GetOffset(offset, L, bits); |
| 2212 bgtzc(scratch, offset); |
| 2213 } else if (IsZero(rt)) { |
| 2214 bits = OffsetSize::kOffset16; |
| 2215 if (!is_near(L, bits)) return false; |
| 2216 offset = GetOffset(offset, L, bits); |
| 2217 bltzc(rs, offset); |
| 2218 } else { |
| 2219 bits = OffsetSize::kOffset16; |
| 2220 if (!is_near(L, bits)) return false; |
| 2221 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2222 DCHECK(!rs.is(scratch)); |
| 2223 offset = GetOffset(offset, L, bits); |
| 2224 bltc(rs, scratch, offset); |
| 2225 } |
| 2226 break; |
| 2227 case less_equal: |
| 2228 // rs <= rt |
| 2229 if (rs.code() == rt.rm_.reg_code) { |
| 2230 bits = OffsetSize::kOffset26; |
| 2231 if (!is_near(L, bits)) return false; |
| 2232 offset = GetOffset(offset, L, bits); |
| 2233 bc(offset); |
| 2234 } else if (rs.is(zero_reg)) { |
| 2235 bits = OffsetSize::kOffset16; |
| 2236 if (!is_near(L, bits)) return false; |
| 2237 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2238 offset = GetOffset(offset, L, bits); |
| 2239 bgezc(scratch, offset); |
| 2240 } else if (IsZero(rt)) { |
| 2241 bits = OffsetSize::kOffset16; |
| 2242 if (!is_near(L, bits)) return false; |
| 2243 offset = GetOffset(offset, L, bits); |
| 2244 blezc(rs, offset); |
| 2245 } else { |
| 2246 bits = OffsetSize::kOffset16; |
| 2247 if (!is_near(L, bits)) return false; |
| 2248 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2249 DCHECK(!rs.is(scratch)); |
| 2250 offset = GetOffset(offset, L, bits); |
| 2251 bgec(scratch, rs, offset); |
| 2252 } |
| 2253 break; |
| 2254 |
| 2255 // Unsigned comparison. |
| 2256 case Ugreater: |
| 2257 // rs > rt |
| 2258 if (rs.code() == rt.rm_.reg_code) { |
| 2259 break; // No code needs to be emitted. |
| 2260 } else if (rs.is(zero_reg)) { |
| 2261 bits = OffsetSize::kOffset21; |
| 2262 if (!is_near(L, bits)) return false; |
| 2263 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2264 offset = GetOffset(offset, L, bits); |
| 2265 bnezc(scratch, offset); |
| 2266 } else if (IsZero(rt)) { |
| 2267 bits = OffsetSize::kOffset21; |
| 2268 if (!is_near(L, bits)) return false; |
| 2269 offset = GetOffset(offset, L, bits); |
| 2270 bnezc(rs, offset); |
| 2271 } else { |
| 2272 bits = OffsetSize::kOffset16; |
| 2273 if (!is_near(L, bits)) return false; |
| 2274 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2275 DCHECK(!rs.is(scratch)); |
| 2276 offset = GetOffset(offset, L, bits); |
| 2277 bltuc(scratch, rs, offset); |
| 2278 } |
| 2279 break; |
| 2280 case Ugreater_equal: |
| 2281 // rs >= rt |
| 2282 if (rs.code() == rt.rm_.reg_code) { |
| 2283 bits = OffsetSize::kOffset26; |
| 2284 if (!is_near(L, bits)) return false; |
| 2285 offset = GetOffset(offset, L, bits); |
| 2286 bc(offset); |
| 2287 } else if (rs.is(zero_reg)) { |
| 2288 bits = OffsetSize::kOffset21; |
| 2289 if (!is_near(L, bits)) return false; |
| 2290 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2291 offset = GetOffset(offset, L, bits); |
| 2292 beqzc(scratch, offset); |
| 2293 } else if (IsZero(rt)) { |
| 2294 bits = OffsetSize::kOffset26; |
| 2295 if (!is_near(L, bits)) return false; |
| 2296 offset = GetOffset(offset, L, bits); |
| 2297 bc(offset); |
| 2298 } else { |
| 2299 bits = OffsetSize::kOffset16; |
| 2300 if (!is_near(L, bits)) return false; |
| 2301 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2302 DCHECK(!rs.is(scratch)); |
| 2303 offset = GetOffset(offset, L, bits); |
| 2304 bgeuc(rs, scratch, offset); |
| 2305 } |
| 2306 break; |
| 2307 case Uless: |
| 2308 // rs < rt |
| 2309 if (rs.code() == rt.rm_.reg_code) { |
| 2310 break; // No code needs to be emitted. |
| 2311 } else if (rs.is(zero_reg)) { |
| 2312 bits = OffsetSize::kOffset21; |
| 2313 if (!is_near(L, bits)) return false; |
| 2314 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2315 offset = GetOffset(offset, L, bits); |
| 2316 bnezc(scratch, offset); |
| 2317 } else if (IsZero(rt)) { |
| 2318 break; // No code needs to be emitted. |
| 2319 } else { |
| 2320 bits = OffsetSize::kOffset16; |
| 2321 if (!is_near(L, bits)) return false; |
| 2322 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2323 DCHECK(!rs.is(scratch)); |
| 2324 offset = GetOffset(offset, L, bits); |
| 2325 bltuc(rs, scratch, offset); |
| 2326 } |
| 2327 break; |
| 2328 case Uless_equal: |
| 2329 // rs <= rt |
| 2330 if (rs.code() == rt.rm_.reg_code) { |
| 2331 bits = OffsetSize::kOffset26; |
| 2332 if (!is_near(L, bits)) return false; |
| 2333 offset = GetOffset(offset, L, bits); |
| 2334 bc(offset); |
| 2335 } else if (rs.is(zero_reg)) { |
| 2336 bits = OffsetSize::kOffset26; |
| 2337 if (!is_near(L, bits)) return false; |
| 2338 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2339 offset = GetOffset(offset, L, bits); |
| 2340 bc(offset); |
| 2341 } else if (IsZero(rt)) { |
| 2342 bits = OffsetSize::kOffset21; |
| 2343 if (!is_near(L, bits)) return false; |
| 2344 offset = GetOffset(offset, L, bits); |
| 2345 beqzc(rs, offset); |
| 2346 } else { |
| 2347 bits = OffsetSize::kOffset16; |
| 2348 if (!is_near(L, bits)) return false; |
| 2349 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2350 DCHECK(!rs.is(scratch)); |
| 2351 offset = GetOffset(offset, L, bits); |
| 2352 bgeuc(scratch, rs, offset); |
| 2353 } |
| 2354 break; |
| 2355 default: |
| 2356 UNREACHABLE(); |
| 2357 } |
| 2358 return true; |
| 2359 } |
| 2360 |
| 2361 |
| 2362 bool MacroAssembler::BranchShortHelper(int16_t offset, Label* L, Condition cond, |
| 2363 Register rs, const Operand& rt, |
| 2364 BranchDelaySlot bdslot) { |
| 2365 if (!is_near(L, OffsetSize::kOffset16)) return false; |
| 2366 |
2030 Register scratch = at; | 2367 Register scratch = at; |
2031 | 2368 int32_t offset32; |
2032 if (rt.is_reg()) { | 2369 |
2033 // NOTE: 'at' can be clobbered by Branch but it is legal to use it as rs or | 2370 // Be careful to always use shifted_branch_offset only just before the |
2034 // rt. | 2371 // branch instruction, as the location will be remembered for patching the |
2035 BlockTrampolinePoolScope block_trampoline_pool(this); | 2372 // target. |
2036 r2 = rt.rm_; | 2373 BlockTrampolinePoolScope block_trampoline_pool(this); |
2037 switch (cond) { | 2374 switch (cond) { |
2038 case cc_always: | 2375 case cc_always: |
2039 b(offset); | 2376 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2040 break; | 2377 b(offset32); |
2041 case eq: | 2378 break; |
2042 beq(rs, r2, offset); | 2379 case eq: |
2043 break; | 2380 if (IsZero(rt)) { |
2044 case ne: | 2381 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2045 bne(rs, r2, offset); | 2382 beq(rs, zero_reg, offset32); |
2046 break; | 2383 } else { |
2047 // Signed comparison. | 2384 // We don't want any other register but scratch clobbered. |
2048 case greater: | 2385 scratch = GetRtAsRegisterHelper(rt, scratch); |
2049 if (r2.is(zero_reg)) { | 2386 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2050 bgtz(rs, offset); | 2387 beq(rs, scratch, offset32); |
2051 } else { | 2388 } |
2052 slt(scratch, r2, rs); | 2389 break; |
2053 bne(scratch, zero_reg, offset); | 2390 case ne: |
2054 } | 2391 if (IsZero(rt)) { |
2055 break; | 2392 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2056 case greater_equal: | 2393 bne(rs, zero_reg, offset32); |
2057 if (r2.is(zero_reg)) { | 2394 } else { |
2058 bgez(rs, offset); | 2395 // We don't want any other register but scratch clobbered. |
2059 } else { | 2396 scratch = GetRtAsRegisterHelper(rt, scratch); |
2060 slt(scratch, rs, r2); | 2397 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2061 beq(scratch, zero_reg, offset); | 2398 bne(rs, scratch, offset32); |
2062 } | 2399 } |
2063 break; | 2400 break; |
2064 case less: | 2401 |
2065 if (r2.is(zero_reg)) { | 2402 // Signed comparison. |
2066 bltz(rs, offset); | 2403 case greater: |
2067 } else { | 2404 if (IsZero(rt)) { |
2068 slt(scratch, rs, r2); | 2405 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2069 bne(scratch, zero_reg, offset); | 2406 bgtz(rs, offset32); |
2070 } | 2407 } else { |
2071 break; | 2408 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
2072 case less_equal: | 2409 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2073 if (r2.is(zero_reg)) { | 2410 bne(scratch, zero_reg, offset32); |
2074 blez(rs, offset); | 2411 } |
2075 } else { | 2412 break; |
2076 slt(scratch, r2, rs); | 2413 case greater_equal: |
2077 beq(scratch, zero_reg, offset); | 2414 if (IsZero(rt)) { |
2078 } | 2415 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2079 break; | 2416 bgez(rs, offset32); |
2080 // Unsigned comparison. | 2417 } else { |
2081 case Ugreater: | 2418 Slt(scratch, rs, rt); |
2082 if (r2.is(zero_reg)) { | 2419 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2083 bne(rs, zero_reg, offset); | 2420 beq(scratch, zero_reg, offset32); |
2084 } else { | 2421 } |
2085 sltu(scratch, r2, rs); | 2422 break; |
2086 bne(scratch, zero_reg, offset); | 2423 case less: |
2087 } | 2424 if (IsZero(rt)) { |
2088 break; | 2425 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2089 case Ugreater_equal: | 2426 bltz(rs, offset32); |
2090 if (r2.is(zero_reg)) { | 2427 } else { |
2091 b(offset); | 2428 Slt(scratch, rs, rt); |
2092 } else { | 2429 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2093 sltu(scratch, rs, r2); | 2430 bne(scratch, zero_reg, offset32); |
2094 beq(scratch, zero_reg, offset); | 2431 } |
2095 } | 2432 break; |
2096 break; | 2433 case less_equal: |
2097 case Uless: | 2434 if (IsZero(rt)) { |
2098 if (r2.is(zero_reg)) { | 2435 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2099 // No code needs to be emitted. | 2436 blez(rs, offset32); |
2100 return; | 2437 } else { |
2101 } else { | 2438 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
2102 sltu(scratch, rs, r2); | 2439 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2103 bne(scratch, zero_reg, offset); | 2440 beq(scratch, zero_reg, offset32); |
2104 } | 2441 } |
2105 break; | 2442 break; |
2106 case Uless_equal: | 2443 |
2107 if (r2.is(zero_reg)) { | 2444 // Unsigned comparison. |
2108 beq(rs, zero_reg, offset); | 2445 case Ugreater: |
2109 } else { | 2446 if (IsZero(rt)) { |
2110 sltu(scratch, r2, rs); | 2447 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2111 beq(scratch, zero_reg, offset); | 2448 bne(rs, zero_reg, offset32); |
2112 } | 2449 } else { |
2113 break; | 2450 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
2114 default: | 2451 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2115 UNREACHABLE(); | 2452 bne(scratch, zero_reg, offset32); |
2116 } | 2453 } |
2117 } else { | 2454 break; |
2118 // Be careful to always use shifted_branch_offset only just before the | 2455 case Ugreater_equal: |
2119 // branch instruction, as the location will be remembered for patching the | 2456 if (IsZero(rt)) { |
2120 // target. | 2457 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2121 BlockTrampolinePoolScope block_trampoline_pool(this); | 2458 b(offset32); |
2122 switch (cond) { | 2459 } else { |
2123 case cc_always: | 2460 Sltu(scratch, rs, rt); |
2124 b(offset); | 2461 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2125 break; | 2462 beq(scratch, zero_reg, offset32); |
2126 case eq: | 2463 } |
2127 if (rt.imm32_ == 0) { | 2464 break; |
2128 beq(rs, zero_reg, offset); | 2465 case Uless: |
2129 } else { | 2466 if (IsZero(rt)) { |
2130 // We don't want any other register but scratch clobbered. | 2467 return true; // No code needs to be emitted. |
2131 DCHECK(!scratch.is(rs)); | 2468 } else { |
2132 r2 = scratch; | 2469 Sltu(scratch, rs, rt); |
2133 li(r2, rt); | 2470 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2134 beq(rs, r2, offset); | 2471 bne(scratch, zero_reg, offset32); |
2135 } | 2472 } |
2136 break; | 2473 break; |
2137 case ne: | 2474 case Uless_equal: |
2138 if (rt.imm32_ == 0) { | 2475 if (IsZero(rt)) { |
2139 bne(rs, zero_reg, offset); | 2476 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2140 } else { | 2477 beq(rs, zero_reg, offset32); |
2141 // We don't want any other register but scratch clobbered. | 2478 } else { |
2142 DCHECK(!scratch.is(rs)); | 2479 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
2143 r2 = scratch; | 2480 offset32 = GetOffset(offset, L, OffsetSize::kOffset16); |
2144 li(r2, rt); | 2481 beq(scratch, zero_reg, offset32); |
2145 bne(rs, r2, offset); | 2482 } |
2146 } | 2483 break; |
2147 break; | 2484 default: |
2148 // Signed comparison. | 2485 UNREACHABLE(); |
2149 case greater: | 2486 } |
2150 if (rt.imm32_ == 0) { | 2487 |
2151 bgtz(rs, offset); | |
2152 } else { | |
2153 r2 = scratch; | |
2154 li(r2, rt); | |
2155 slt(scratch, r2, rs); | |
2156 bne(scratch, zero_reg, offset); | |
2157 } | |
2158 break; | |
2159 case greater_equal: | |
2160 if (rt.imm32_ == 0) { | |
2161 bgez(rs, offset); | |
2162 } else if (is_int16(rt.imm32_)) { | |
2163 slti(scratch, rs, rt.imm32_); | |
2164 beq(scratch, zero_reg, offset); | |
2165 } else { | |
2166 r2 = scratch; | |
2167 li(r2, rt); | |
2168 slt(scratch, rs, r2); | |
2169 beq(scratch, zero_reg, offset); | |
2170 } | |
2171 break; | |
2172 case less: | |
2173 if (rt.imm32_ == 0) { | |
2174 bltz(rs, offset); | |
2175 } else if (is_int16(rt.imm32_)) { | |
2176 slti(scratch, rs, rt.imm32_); | |
2177 bne(scratch, zero_reg, offset); | |
2178 } else { | |
2179 r2 = scratch; | |
2180 li(r2, rt); | |
2181 slt(scratch, rs, r2); | |
2182 bne(scratch, zero_reg, offset); | |
2183 } | |
2184 break; | |
2185 case less_equal: | |
2186 if (rt.imm32_ == 0) { | |
2187 blez(rs, offset); | |
2188 } else { | |
2189 r2 = scratch; | |
2190 li(r2, rt); | |
2191 slt(scratch, r2, rs); | |
2192 beq(scratch, zero_reg, offset); | |
2193 } | |
2194 break; | |
2195 // Unsigned comparison. | |
2196 case Ugreater: | |
2197 if (rt.imm32_ == 0) { | |
2198 bne(rs, zero_reg, offset); | |
2199 } else { | |
2200 r2 = scratch; | |
2201 li(r2, rt); | |
2202 sltu(scratch, r2, rs); | |
2203 bne(scratch, zero_reg, offset); | |
2204 } | |
2205 break; | |
2206 case Ugreater_equal: | |
2207 if (rt.imm32_ == 0) { | |
2208 b(offset); | |
2209 } else if (is_int16(rt.imm32_)) { | |
2210 sltiu(scratch, rs, rt.imm32_); | |
2211 beq(scratch, zero_reg, offset); | |
2212 } else { | |
2213 r2 = scratch; | |
2214 li(r2, rt); | |
2215 sltu(scratch, rs, r2); | |
2216 beq(scratch, zero_reg, offset); | |
2217 } | |
2218 break; | |
2219 case Uless: | |
2220 if (rt.imm32_ == 0) { | |
2221 // No code needs to be emitted. | |
2222 return; | |
2223 } else if (is_int16(rt.imm32_)) { | |
2224 sltiu(scratch, rs, rt.imm32_); | |
2225 bne(scratch, zero_reg, offset); | |
2226 } else { | |
2227 r2 = scratch; | |
2228 li(r2, rt); | |
2229 sltu(scratch, rs, r2); | |
2230 bne(scratch, zero_reg, offset); | |
2231 } | |
2232 break; | |
2233 case Uless_equal: | |
2234 if (rt.imm32_ == 0) { | |
2235 beq(rs, zero_reg, offset); | |
2236 } else { | |
2237 r2 = scratch; | |
2238 li(r2, rt); | |
2239 sltu(scratch, r2, rs); | |
2240 beq(scratch, zero_reg, offset); | |
2241 } | |
2242 break; | |
2243 default: | |
2244 UNREACHABLE(); | |
2245 } | |
2246 } | |
2247 // Emit a nop in the branch delay slot if required. | 2488 // Emit a nop in the branch delay slot if required. |
2248 if (bdslot == PROTECT) | 2489 if (bdslot == PROTECT) |
2249 nop(); | 2490 nop(); |
2250 } | 2491 |
2251 | 2492 return true; |
2252 | 2493 } |
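To make the pre-r6 lowering concrete: conditions without a native branch instruction are computed into the scratch register with slt/sltu and then tested with beq/bne against zero_reg. A minimal sketch, not from the patch (a0/a1 and the word offset are arbitrary, scratch is at), of the assembler calls made for Branch(&L, greater, a0, Operand(a1)) with a PROTECT delay slot:

    slt(at, a1, a0);            // at = (a1 < a0) ? 1 : 0, i.e. a0 > a1
    bne(at, zero_reg, offset);  // taken exactly when a0 > a1
    nop();                      // branch delay slot (PROTECT)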
2253 void MacroAssembler::BranchShort(Label* L, BranchDelaySlot bdslot) { | 2494 |
2254 // We use branch_offset as an argument for the branch instructions to be sure | 2495 |
2255 // it is called just before generating the branch instruction, as needed. | 2496 bool MacroAssembler::BranchShortCheck(int32_t offset, Label* L, Condition cond, |
2256 | 2497 Register rs, const Operand& rt, |
2257 b(shifted_branch_offset(L, false)); | 2498 BranchDelaySlot bdslot) { |
2258 | |
2259 // Emit a nop in the branch delay slot if required. | |
2260 if (bdslot == PROTECT) | |
2261 nop(); | |
2262 } | |
2263 | |
2264 | |
2265 void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs, | |
2266 const Operand& rt, | |
2267 BranchDelaySlot bdslot) { | |
2268 BRANCH_ARGS_CHECK(cond, rs, rt); | 2499 BRANCH_ARGS_CHECK(cond, rs, rt); |
2269 | 2500 |
2270 int32_t offset = 0; | 2501 if (!L) { |
2271 Register r2 = no_reg; | 2502 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
2272 Register scratch = at; | 2503 DCHECK(is_int26(offset)); |
2273 if (rt.is_reg()) { | 2504 return BranchShortHelperR6(offset, nullptr, cond, rs, rt); |
2274 BlockTrampolinePoolScope block_trampoline_pool(this); | 2505 } else { |
2275 r2 = rt.rm_; | 2506 DCHECK(is_int16(offset)); |
2276 // Be careful to always use shifted_branch_offset only just before the | 2507 return BranchShortHelper(offset, nullptr, cond, rs, rt, bdslot); |
2277 // branch instruction, as the location will be remembered for patching the | |
2278 // target. | |
2279 switch (cond) { | |
2280 case cc_always: | |
2281 offset = shifted_branch_offset(L, false); | |
2282 b(offset); | |
2283 break; | |
2284 case eq: | |
2285 offset = shifted_branch_offset(L, false); | |
2286 beq(rs, r2, offset); | |
2287 break; | |
2288 case ne: | |
2289 offset = shifted_branch_offset(L, false); | |
2290 bne(rs, r2, offset); | |
2291 break; | |
2292 // Signed comparison. | |
2293 case greater: | |
2294 if (r2.is(zero_reg)) { | |
2295 offset = shifted_branch_offset(L, false); | |
2296 bgtz(rs, offset); | |
2297 } else { | |
2298 slt(scratch, r2, rs); | |
2299 offset = shifted_branch_offset(L, false); | |
2300 bne(scratch, zero_reg, offset); | |
2301 } | |
2302 break; | |
2303 case greater_equal: | |
2304 if (r2.is(zero_reg)) { | |
2305 offset = shifted_branch_offset(L, false); | |
2306 bgez(rs, offset); | |
2307 } else { | |
2308 slt(scratch, rs, r2); | |
2309 offset = shifted_branch_offset(L, false); | |
2310 beq(scratch, zero_reg, offset); | |
2311 } | |
2312 break; | |
2313 case less: | |
2314 if (r2.is(zero_reg)) { | |
2315 offset = shifted_branch_offset(L, false); | |
2316 bltz(rs, offset); | |
2317 } else { | |
2318 slt(scratch, rs, r2); | |
2319 offset = shifted_branch_offset(L, false); | |
2320 bne(scratch, zero_reg, offset); | |
2321 } | |
2322 break; | |
2323 case less_equal: | |
2324 if (r2.is(zero_reg)) { | |
2325 offset = shifted_branch_offset(L, false); | |
2326 blez(rs, offset); | |
2327 } else { | |
2328 slt(scratch, r2, rs); | |
2329 offset = shifted_branch_offset(L, false); | |
2330 beq(scratch, zero_reg, offset); | |
2331 } | |
2332 break; | |
2333 // Unsigned comparison. | |
2334 case Ugreater: | |
2335 if (r2.is(zero_reg)) { | |
2336 offset = shifted_branch_offset(L, false); | |
2337 bne(rs, zero_reg, offset); | |
2338 } else { | |
2339 sltu(scratch, r2, rs); | |
2340 offset = shifted_branch_offset(L, false); | |
2341 bne(scratch, zero_reg, offset); | |
2342 } | |
2343 break; | |
2344 case Ugreater_equal: | |
2345 if (r2.is(zero_reg)) { | |
2346 offset = shifted_branch_offset(L, false); | |
2347 b(offset); | |
2348 } else { | |
2349 sltu(scratch, rs, r2); | |
2350 offset = shifted_branch_offset(L, false); | |
2351 beq(scratch, zero_reg, offset); | |
2352 } | |
2353 break; | |
2354 case Uless: | |
2355 if (r2.is(zero_reg)) { | |
2356 // No code needs to be emitted. | |
2357 return; | |
2358 } else { | |
2359 sltu(scratch, rs, r2); | |
2360 offset = shifted_branch_offset(L, false); | |
2361 bne(scratch, zero_reg, offset); | |
2362 } | |
2363 break; | |
2364 case Uless_equal: | |
2365 if (r2.is(zero_reg)) { | |
2366 offset = shifted_branch_offset(L, false); | |
2367 beq(rs, zero_reg, offset); | |
2368 } else { | |
2369 sltu(scratch, r2, rs); | |
2370 offset = shifted_branch_offset(L, false); | |
2371 beq(scratch, zero_reg, offset); | |
2372 } | |
2373 break; | |
2374 default: | |
2375 UNREACHABLE(); | |
2376 } | 2508 } |
2377 } else { | 2509 } else { |
2378 // Be careful to always use shifted_branch_offset only just before the | 2510 DCHECK(offset == 0); |
2379 // branch instruction, as the location will be remembered for patching the | 2511 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
2380 // target. | 2512 return BranchShortHelperR6(0, L, cond, rs, rt); |
2381 BlockTrampolinePoolScope block_trampoline_pool(this); | 2513 } else { |
2382 switch (cond) { | 2514 return BranchShortHelper(0, L, cond, rs, rt, bdslot); |
2383 case cc_always: | |
2384 offset = shifted_branch_offset(L, false); | |
2385 b(offset); | |
2386 break; | |
2387 case eq: | |
2388 if (rt.imm32_ == 0) { | |
2389 offset = shifted_branch_offset(L, false); | |
2390 beq(rs, zero_reg, offset); | |
2391 } else { | |
2392 DCHECK(!scratch.is(rs)); | |
2393 r2 = scratch; | |
2394 li(r2, rt); | |
2395 offset = shifted_branch_offset(L, false); | |
2396 beq(rs, r2, offset); | |
2397 } | |
2398 break; | |
2399 case ne: | |
2400 if (rt.imm32_ == 0) { | |
2401 offset = shifted_branch_offset(L, false); | |
2402 bne(rs, zero_reg, offset); | |
2403 } else { | |
2404 DCHECK(!scratch.is(rs)); | |
2405 r2 = scratch; | |
2406 li(r2, rt); | |
2407 offset = shifted_branch_offset(L, false); | |
2408 bne(rs, r2, offset); | |
2409 } | |
2410 break; | |
2411 // Signed comparison. | |
2412 case greater: | |
2413 if (rt.imm32_ == 0) { | |
2414 offset = shifted_branch_offset(L, false); | |
2415 bgtz(rs, offset); | |
2416 } else { | |
2417 DCHECK(!scratch.is(rs)); | |
2418 r2 = scratch; | |
2419 li(r2, rt); | |
2420 slt(scratch, r2, rs); | |
2421 offset = shifted_branch_offset(L, false); | |
2422 bne(scratch, zero_reg, offset); | |
2423 } | |
2424 break; | |
2425 case greater_equal: | |
2426 if (rt.imm32_ == 0) { | |
2427 offset = shifted_branch_offset(L, false); | |
2428 bgez(rs, offset); | |
2429 } else if (is_int16(rt.imm32_)) { | |
2430 slti(scratch, rs, rt.imm32_); | |
2431 offset = shifted_branch_offset(L, false); | |
2432 beq(scratch, zero_reg, offset); | |
2433 } else { | |
2434 DCHECK(!scratch.is(rs)); | |
2435 r2 = scratch; | |
2436 li(r2, rt); | |
2437 slt(scratch, rs, r2); | |
2438 offset = shifted_branch_offset(L, false); | |
2439 beq(scratch, zero_reg, offset); | |
2440 } | |
2441 break; | |
2442 case less: | |
2443 if (rt.imm32_ == 0) { | |
2444 offset = shifted_branch_offset(L, false); | |
2445 bltz(rs, offset); | |
2446 } else if (is_int16(rt.imm32_)) { | |
2447 slti(scratch, rs, rt.imm32_); | |
2448 offset = shifted_branch_offset(L, false); | |
2449 bne(scratch, zero_reg, offset); | |
2450 } else { | |
2451 DCHECK(!scratch.is(rs)); | |
2452 r2 = scratch; | |
2453 li(r2, rt); | |
2454 slt(scratch, rs, r2); | |
2455 offset = shifted_branch_offset(L, false); | |
2456 bne(scratch, zero_reg, offset); | |
2457 } | |
2458 break; | |
2459 case less_equal: | |
2460 if (rt.imm32_ == 0) { | |
2461 offset = shifted_branch_offset(L, false); | |
2462 blez(rs, offset); | |
2463 } else { | |
2464 DCHECK(!scratch.is(rs)); | |
2465 r2 = scratch; | |
2466 li(r2, rt); | |
2467 slt(scratch, r2, rs); | |
2468 offset = shifted_branch_offset(L, false); | |
2469 beq(scratch, zero_reg, offset); | |
2470 } | |
2471 break; | |
2472 // Unsigned comparison. | |
2473 case Ugreater: | |
2474 if (rt.imm32_ == 0) { | |
2475 offset = shifted_branch_offset(L, false); | |
2476 bne(rs, zero_reg, offset); | |
2477 } else { | |
2478 DCHECK(!scratch.is(rs)); | |
2479 r2 = scratch; | |
2480 li(r2, rt); | |
2481 sltu(scratch, r2, rs); | |
2482 offset = shifted_branch_offset(L, false); | |
2483 bne(scratch, zero_reg, offset); | |
2484 } | |
2485 break; | |
2486 case Ugreater_equal: | |
2487 if (rt.imm32_ == 0) { | |
2488 offset = shifted_branch_offset(L, false); | |
2489 b(offset); | |
2490 } else if (is_int16(rt.imm32_)) { | |
2491 sltiu(scratch, rs, rt.imm32_); | |
2492 offset = shifted_branch_offset(L, false); | |
2493 beq(scratch, zero_reg, offset); | |
2494 } else { | |
2495 DCHECK(!scratch.is(rs)); | |
2496 r2 = scratch; | |
2497 li(r2, rt); | |
2498 sltu(scratch, rs, r2); | |
2499 offset = shifted_branch_offset(L, false); | |
2500 beq(scratch, zero_reg, offset); | |
2501 } | |
2502 break; | |
2503 case Uless: | |
2504 if (rt.imm32_ == 0) { | |
2505 // No code needs to be emitted. | |
2506 return; | |
2507 } else if (is_int16(rt.imm32_)) { | |
2508 sltiu(scratch, rs, rt.imm32_); | |
2509 offset = shifted_branch_offset(L, false); | |
2510 bne(scratch, zero_reg, offset); | |
2511 } else { | |
2512 DCHECK(!scratch.is(rs)); | |
2513 r2 = scratch; | |
2514 li(r2, rt); | |
2515 sltu(scratch, rs, r2); | |
2516 offset = shifted_branch_offset(L, false); | |
2517 bne(scratch, zero_reg, offset); | |
2518 } | |
2519 break; | |
2520 case Uless_equal: | |
2521 if (rt.imm32_ == 0) { | |
2522 offset = shifted_branch_offset(L, false); | |
2523 beq(rs, zero_reg, offset); | |
2524 } else { | |
2525 DCHECK(!scratch.is(rs)); | |
2526 r2 = scratch; | |
2527 li(r2, rt); | |
2528 sltu(scratch, r2, rs); | |
2529 offset = shifted_branch_offset(L, false); | |
2530 beq(scratch, zero_reg, offset); | |
2531 } | |
2532 break; | |
2533 default: | |
2534 UNREACHABLE(); | |
2535 } | 2515 } |
2536 } | 2516 } |
2537 // Check that offset could actually hold on an int16_t. | 2517 return false; |
2538 DCHECK(is_int16(offset)); | 2518 } |
2539 // Emit a nop in the branch delay slot if required. | 2519 |
2540 if (bdslot == PROTECT) | 2520 |
2541 nop(); | 2521 void MacroAssembler::BranchShort(int32_t offset, Condition cond, Register rs, |
2542 } | 2522 const Operand& rt, BranchDelaySlot bdslot) { |
2543 | 2523 BranchShortCheck(offset, nullptr, cond, rs, rt, bdslot); |
2544 | 2524 } |
2545 void MacroAssembler::BranchAndLink(int16_t offset, BranchDelaySlot bdslot) { | 2525 |
| 2526 |
| 2527 void MacroAssembler::BranchShort(Label* L, Condition cond, Register rs, |
| 2528 const Operand& rt, BranchDelaySlot bdslot) { |
| 2529 BranchShortCheck(0, L, cond, rs, rt, bdslot); |
| 2530 } |
| 2531 |
| 2532 |
| 2533 void MacroAssembler::BranchAndLink(int32_t offset, BranchDelaySlot bdslot) { |
2546 BranchAndLinkShort(offset, bdslot); | 2534 BranchAndLinkShort(offset, bdslot); |
2547 } | 2535 } |
2548 | 2536 |
2549 | 2537 |
2550 void MacroAssembler::BranchAndLink(int16_t offset, Condition cond, Register rs, | 2538 void MacroAssembler::BranchAndLink(int32_t offset, Condition cond, Register rs, |
2551 const Operand& rt, | 2539 const Operand& rt, BranchDelaySlot bdslot) { |
2552 BranchDelaySlot bdslot) { | 2540 bool is_near = BranchAndLinkShortCheck(offset, nullptr, cond, rs, rt, bdslot); |
2553 BranchAndLinkShort(offset, cond, rs, rt, bdslot); | 2541 DCHECK(is_near); |
| 2542 USE(is_near); |
2554 } | 2543 } |
2555 | 2544 |
2556 | 2545 |
2557 void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) { | 2546 void MacroAssembler::BranchAndLink(Label* L, BranchDelaySlot bdslot) { |
2558 if (L->is_bound()) { | 2547 if (L->is_bound()) { |
2559 if (is_near(L)) { | 2548 if (is_near_branch(L)) { |
2560 BranchAndLinkShort(L, bdslot); | 2549 BranchAndLinkShort(L, bdslot); |
2561 } else { | 2550 } else { |
2562 Jalr(L, bdslot); | 2551 Jalr(L, bdslot); |
2563 } | 2552 } |
2564 } else { | 2553 } else { |
2565 if (is_trampoline_emitted()) { | 2554 if (is_trampoline_emitted()) { |
2566 Jalr(L, bdslot); | 2555 Jalr(L, bdslot); |
2567 } else { | 2556 } else { |
2568 BranchAndLinkShort(L, bdslot); | 2557 BranchAndLinkShort(L, bdslot); |
2569 } | 2558 } |
2570 } | 2559 } |
2571 } | 2560 } |
2572 | 2561 |
2573 | 2562 |
2574 void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs, | 2563 void MacroAssembler::BranchAndLink(Label* L, Condition cond, Register rs, |
2575 const Operand& rt, | 2564 const Operand& rt, |
2576 BranchDelaySlot bdslot) { | 2565 BranchDelaySlot bdslot) { |
2577 if (L->is_bound()) { | 2566 if (L->is_bound()) { |
2578 if (is_near(L)) { | 2567 if (!BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot)) { |
2579 BranchAndLinkShort(L, cond, rs, rt, bdslot); | |
2580 } else { | |
2581 Label skip; | 2568 Label skip; |
2582 Condition neg_cond = NegateCondition(cond); | 2569 Condition neg_cond = NegateCondition(cond); |
2583 BranchShort(&skip, neg_cond, rs, rt); | 2570 BranchShort(&skip, neg_cond, rs, rt); |
2584 Jalr(L, bdslot); | 2571 Jalr(L, bdslot); |
2585 bind(&skip); | 2572 bind(&skip); |
2586 } | 2573 } |
2587 } else { | 2574 } else { |
2588 if (is_trampoline_emitted()) { | 2575 if (is_trampoline_emitted()) { |
2589 Label skip; | 2576 Label skip; |
2590 Condition neg_cond = NegateCondition(cond); | 2577 Condition neg_cond = NegateCondition(cond); |
2591 BranchShort(&skip, neg_cond, rs, rt); | 2578 BranchShort(&skip, neg_cond, rs, rt); |
2592 Jalr(L, bdslot); | 2579 Jalr(L, bdslot); |
2593 bind(&skip); | 2580 bind(&skip); |
2594 } else { | 2581 } else { |
2595 BranchAndLinkShort(L, cond, rs, rt, bdslot); | 2582 BranchAndLinkShortCheck(0, L, cond, rs, rt, bdslot); |
2596 } | 2583 } |
2597 } | 2584 } |
2598 } | 2585 } |
2599 | 2586 |
2600 | 2587 |
2601 // We need to use a bgezal or bltzal, but they can't be used directly with the | 2588 void MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L, |
2602 // slt instructions. We could use sub or add instead but we would miss overflow | 2589 BranchDelaySlot bdslot) { |
2603 // cases, so we keep slt and add an intermediate third instruction. | 2590 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
2604 void MacroAssembler::BranchAndLinkShort(int16_t offset, | |
2605 BranchDelaySlot bdslot) { | |
2606 bal(offset); | 2591 bal(offset); |
2607 | 2592 |
2608 // Emit a nop in the branch delay slot if required. | 2593 // Emit a nop in the branch delay slot if required. |
2609 if (bdslot == PROTECT) | 2594 if (bdslot == PROTECT) |
2610 nop(); | 2595 nop(); |
2611 } | 2596 } |
2612 | 2597 |
2613 | 2598 |
2614 void MacroAssembler::BranchAndLinkShort(int16_t offset, Condition cond, | 2599 void MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L) { |
2615 Register rs, const Operand& rt, | 2600 offset = GetOffset(offset, L, OffsetSize::kOffset26); |
| 2601 balc(offset); |
| 2602 } |
| 2603 |
| 2604 |
| 2605 void MacroAssembler::BranchAndLinkShort(int32_t offset, |
2616 BranchDelaySlot bdslot) { | 2606 BranchDelaySlot bdslot) { |
2617 BRANCH_ARGS_CHECK(cond, rs, rt); | 2607 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
2618 Register r2 = no_reg; | 2608 DCHECK(is_int26(offset)); |
2619 Register scratch = at; | 2609 BranchAndLinkShortHelperR6(offset, nullptr); |
2620 | |
2621 if (rt.is_reg()) { | |
2622 r2 = rt.rm_; | |
2623 } else if (cond != cc_always) { | |
2624 r2 = scratch; | |
2625 li(r2, rt); | |
2626 } | |
2627 | |
2628 if (!IsMipsArchVariant(kMips32r6)) { | |
2629 BlockTrampolinePoolScope block_trampoline_pool(this); | |
2630 switch (cond) { | |
2631 case cc_always: | |
2632 bal(offset); | |
2633 break; | |
2634 case eq: | |
2635 bne(rs, r2, 2); | |
2636 nop(); | |
2637 bal(offset); | |
2638 break; | |
2639 case ne: | |
2640 beq(rs, r2, 2); | |
2641 nop(); | |
2642 bal(offset); | |
2643 break; | |
2644 | |
2645 // Signed comparison. | |
2646 case greater: | |
2647 slt(scratch, r2, rs); | |
2648 addiu(scratch, scratch, -1); | |
2649 bgezal(scratch, offset); | |
2650 break; | |
2651 case greater_equal: | |
2652 slt(scratch, rs, r2); | |
2653 addiu(scratch, scratch, -1); | |
2654 bltzal(scratch, offset); | |
2655 break; | |
2656 case less: | |
2657 slt(scratch, rs, r2); | |
2658 addiu(scratch, scratch, -1); | |
2659 bgezal(scratch, offset); | |
2660 break; | |
2661 case less_equal: | |
2662 slt(scratch, r2, rs); | |
2663 addiu(scratch, scratch, -1); | |
2664 bltzal(scratch, offset); | |
2665 break; | |
2666 | |
2667 // Unsigned comparison. | |
2668 case Ugreater: | |
2669 sltu(scratch, r2, rs); | |
2670 addiu(scratch, scratch, -1); | |
2671 bgezal(scratch, offset); | |
2672 break; | |
2673 case Ugreater_equal: | |
2674 sltu(scratch, rs, r2); | |
2675 addiu(scratch, scratch, -1); | |
2676 bltzal(scratch, offset); | |
2677 break; | |
2678 case Uless: | |
2679 sltu(scratch, rs, r2); | |
2680 addiu(scratch, scratch, -1); | |
2681 bgezal(scratch, offset); | |
2682 break; | |
2683 case Uless_equal: | |
2684 sltu(scratch, r2, rs); | |
2685 addiu(scratch, scratch, -1); | |
2686 bltzal(scratch, offset); | |
2687 break; | |
2688 | |
2689 default: | |
2690 UNREACHABLE(); | |
2691 } | |
2692 } else { | 2610 } else { |
2693 BlockTrampolinePoolScope block_trampoline_pool(this); | 2611 DCHECK(is_int16(offset)); |
2694 switch (cond) { | 2612 BranchAndLinkShortHelper(offset, nullptr, bdslot); |
2695 case cc_always: | 2613 } |
2696 bal(offset); | 2614 } |
2697 break; | 2615 |
2698 case eq: | 2616 |
2699 bne(rs, r2, 2); | 2617 void MacroAssembler::BranchAndLinkShort(Label* L, BranchDelaySlot bdslot) { |
2700 nop(); | 2618 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
2701 bal(offset); | 2619 BranchAndLinkShortHelperR6(0, L); |
2702 break; | 2620 } else { |
2703 case ne: | 2621 BranchAndLinkShortHelper(0, L, bdslot); |
2704 beq(rs, r2, 2); | 2622 } |
2705 nop(); | 2623 } |
2706 bal(offset); | 2624 |
2707 break; | 2625 |
2708 | 2626 bool MacroAssembler::BranchAndLinkShortHelperR6(int32_t offset, Label* L, |
2709 // Signed comparison. | 2627 Condition cond, Register rs, |
2710 case greater: | 2628 const Operand& rt) { |
2711 // rs > rt | 2629 Register scratch = rs.is(at) ? t8 : at; |
2712 slt(scratch, r2, rs); | 2630 OffsetSize bits = OffsetSize::kOffset16; |
2713 beq(scratch, zero_reg, 2); | 2631 |
2714 nop(); | 2632 BlockTrampolinePoolScope block_trampoline_pool(this); |
2715 bal(offset); | 2633 DCHECK((cond == cc_always && is_int26(offset)) || is_int16(offset)); |
2716 break; | 2634 switch (cond) { |
2717 case greater_equal: | 2635 case cc_always: |
2718 // rs >= rt | 2636 bits = OffsetSize::kOffset26; |
2719 slt(scratch, rs, r2); | 2637 if (!is_near(L, bits)) return false; |
2720 bne(scratch, zero_reg, 2); | 2638 offset = GetOffset(offset, L, bits); |
2721 nop(); | 2639 balc(offset); |
2722 bal(offset); | 2640 break; |
2723 break; | 2641 case eq: |
2724 case less: | 2642 if (!is_near(L, bits)) return false; |
2725 // rs < r2 | 2643 Subu(scratch, rs, rt); |
2726 slt(scratch, rs, r2); | 2644 offset = GetOffset(offset, L, bits); |
2727 bne(scratch, zero_reg, 2); | 2645 beqzalc(scratch, offset); |
2728 nop(); | 2646 break; |
2729 bal(offset); | 2647 case ne: |
2730 break; | 2648 if (!is_near(L, bits)) return false; |
2731 case less_equal: | 2649 Subu(scratch, rs, rt); |
2732 // rs <= r2 | 2650 offset = GetOffset(offset, L, bits); |
2733 slt(scratch, r2, rs); | 2651 bnezalc(scratch, offset); |
2734 bne(scratch, zero_reg, 2); | 2652 break; |
2735 nop(); | 2653 |
2736 bal(offset); | 2654 // Signed comparison. |
2737 break; | 2655 case greater: |
2738 | 2656 // rs > rt |
2739 | 2657 if (rs.code() == rt.rm_.reg_code) { |
2740 // Unsigned comparison. | 2658 break; // No code needs to be emitted. |
2741 case Ugreater: | 2659 } else if (rs.is(zero_reg)) { |
2742 // rs > rt | 2660 if (!is_near(L, bits)) return false; |
2743 sltu(scratch, r2, rs); | 2661 scratch = GetRtAsRegisterHelper(rt, scratch); |
2744 beq(scratch, zero_reg, 2); | 2662 offset = GetOffset(offset, L, bits); |
2745 nop(); | 2663 bltzalc(scratch, offset); |
2746 bal(offset); | 2664 } else if (IsZero(rt)) { |
2747 break; | 2665 if (!is_near(L, bits)) return false; |
2748 case Ugreater_equal: | 2666 offset = GetOffset(offset, L, bits); |
2749 // rs >= rt | 2667 bgtzalc(rs, offset); |
2750 sltu(scratch, rs, r2); | 2668 } else { |
2751 bne(scratch, zero_reg, 2); | 2669 if (!is_near(L, bits)) return false; |
2752 nop(); | 2670 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
2753 bal(offset); | 2671 offset = GetOffset(offset, L, bits); |
2754 break; | 2672 bnezalc(scratch, offset); |
2755 case Uless: | 2673 } |
2756 // rs < r2 | 2674 break; |
2757 sltu(scratch, rs, r2); | 2675 case greater_equal: |
2758 bne(scratch, zero_reg, 2); | 2676 // rs >= rt |
2759 nop(); | 2677 if (rs.code() == rt.rm_.reg_code) { |
2760 bal(offset); | 2678 bits = OffsetSize::kOffset26; |
2761 break; | 2679 if (!is_near(L, bits)) return false; |
2762 case Uless_equal: | 2680 offset = GetOffset(offset, L, bits); |
2763 // rs <= r2 | 2681 balc(offset); |
2764 sltu(scratch, r2, rs); | 2682 } else if (rs.is(zero_reg)) { |
2765 bne(scratch, zero_reg, 2); | 2683 if (!is_near(L, bits)) return false; |
2766 nop(); | 2684 scratch = GetRtAsRegisterHelper(rt, scratch); |
2767 bal(offset); | 2685 offset = GetOffset(offset, L, bits); |
2768 break; | 2686 blezalc(scratch, offset); |
2769 default: | 2687 } else if (IsZero(rt)) { |
2770 UNREACHABLE(); | 2688 if (!is_near(L, bits)) return false; |
2771 } | 2689 offset = GetOffset(offset, L, bits); |
| 2690 bgezalc(rs, offset); |
| 2691 } else { |
| 2692 if (!is_near(L, bits)) return false; |
| 2693 Slt(scratch, rs, rt); |
| 2694 offset = GetOffset(offset, L, bits); |
| 2695 beqzalc(scratch, offset); |
| 2696 } |
| 2697 break; |
| 2698 case less: |
| 2699 // rs < rt |
| 2700 if (rs.code() == rt.rm_.reg_code) { |
| 2701 break; // No code needs to be emitted. |
| 2702 } else if (rs.is(zero_reg)) { |
| 2703 if (!is_near(L, bits)) return false; |
| 2704 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2705 offset = GetOffset(offset, L, bits); |
| 2706 bgtzalc(scratch, offset); |
| 2707 } else if (IsZero(rt)) { |
| 2708 if (!is_near(L, bits)) return false; |
| 2709 offset = GetOffset(offset, L, bits); |
| 2710 bltzalc(rs, offset); |
| 2711 } else { |
| 2712 if (!is_near(L, bits)) return false; |
| 2713 Slt(scratch, rs, rt); |
| 2714 offset = GetOffset(offset, L, bits); |
| 2715 bnezalc(scratch, offset); |
| 2716 } |
| 2717 break; |
| 2718 case less_equal: |
| 2719 // rs <= rt |
| 2720 if (rs.code() == rt.rm_.reg_code) { |
| 2721 bits = OffsetSize::kOffset26; |
| 2722 if (!is_near(L, bits)) return false; |
| 2723 offset = GetOffset(offset, L, bits); |
| 2724 balc(offset); |
| 2725 } else if (rs.is(zero_reg)) { |
| 2726 if (!is_near(L, bits)) return false; |
| 2727 scratch = GetRtAsRegisterHelper(rt, scratch); |
| 2728 offset = GetOffset(offset, L, bits); |
| 2729 bgezalc(scratch, offset); |
| 2730 } else if (IsZero(rt)) { |
| 2731 if (!is_near(L, bits)) return false; |
| 2732 offset = GetOffset(offset, L, bits); |
| 2733 blezalc(rs, offset); |
| 2734 } else { |
| 2735 if (!is_near(L, bits)) return false; |
| 2736 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
| 2737 offset = GetOffset(offset, L, bits); |
| 2738 beqzalc(scratch, offset); |
| 2739 } |
| 2740 break; |
| 2741 |
| 2742 |
| 2743 // Unsigned comparison. |
| 2744 case Ugreater: |
| 2745 // rs > rt |
| 2746 if (!is_near(L, bits)) return false; |
| 2747 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
| 2748 offset = GetOffset(offset, L, bits); |
| 2749 bnezalc(scratch, offset); |
| 2750 break; |
| 2751 case Ugreater_equal: |
| 2752 // rs >= rt |
| 2753 if (!is_near(L, bits)) return false; |
| 2754 Sltu(scratch, rs, rt); |
| 2755 offset = GetOffset(offset, L, bits); |
| 2756 beqzalc(scratch, offset); |
| 2757 break; |
| 2758 case Uless: |
| 2759 // rs < rt |
| 2760 if (!is_near(L, bits)) return false; |
| 2761 Sltu(scratch, rs, rt); |
| 2762 offset = GetOffset(offset, L, bits); |
| 2763 bnezalc(scratch, offset); |
| 2764 break; |
| 2765 case Uless_equal: |
| 2766 // rs <= rt |
| 2767 if (!is_near(L, bits)) return false; |
| 2768 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
| 2769 offset = GetOffset(offset, L, bits); |
| 2770 beqzalc(scratch, offset); |
| 2771 break; |
| 2772 default: |
| 2773 UNREACHABLE(); |
| 2774 } |
| 2775 return true; |
| 2776 } |
| 2777 |
| 2778 |
| 2779 // Pre r6 we need to use a bgezal or bltzal, but they can't be used directly |
| 2780 // with the slt instructions. We could use sub or add instead but we would miss |
| 2781 // overflow cases, so we keep slt and add an intermediate third instruction. |
| 2782 bool MacroAssembler::BranchAndLinkShortHelper(int16_t offset, Label* L, |
| 2783 Condition cond, Register rs, |
| 2784 const Operand& rt, |
| 2785 BranchDelaySlot bdslot) { |
| 2786 if (!is_near(L, OffsetSize::kOffset16)) return false; |
| 2787 |
| 2788 Register scratch = t8; |
| 2789 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 2790 |
| 2791 switch (cond) { |
| 2792 case cc_always: |
| 2793 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2794 bal(offset); |
| 2795 break; |
| 2796 case eq: |
| 2797 bne(rs, GetRtAsRegisterHelper(rt, scratch), 2); |
| 2798 nop(); |
| 2799 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2800 bal(offset); |
| 2801 break; |
| 2802 case ne: |
| 2803 beq(rs, GetRtAsRegisterHelper(rt, scratch), 2); |
| 2804 nop(); |
| 2805 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2806 bal(offset); |
| 2807 break; |
| 2808 |
| 2809 // Signed comparison. |
| 2810 case greater: |
| 2811 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
| 2812 addiu(scratch, scratch, -1); |
| 2813 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2814 bgezal(scratch, offset); |
| 2815 break; |
| 2816 case greater_equal: |
| 2817 Slt(scratch, rs, rt); |
| 2818 addiu(scratch, scratch, -1); |
| 2819 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2820 bltzal(scratch, offset); |
| 2821 break; |
| 2822 case less: |
| 2823 Slt(scratch, rs, rt); |
| 2824 addiu(scratch, scratch, -1); |
| 2825 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2826 bgezal(scratch, offset); |
| 2827 break; |
| 2828 case less_equal: |
| 2829 Slt(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
| 2830 addiu(scratch, scratch, -1); |
| 2831 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2832 bltzal(scratch, offset); |
| 2833 break; |
| 2834 |
| 2835 // Unsigned comparison. |
| 2836 case Ugreater: |
| 2837 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
| 2838 addiu(scratch, scratch, -1); |
| 2839 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2840 bgezal(scratch, offset); |
| 2841 break; |
| 2842 case Ugreater_equal: |
| 2843 Sltu(scratch, rs, rt); |
| 2844 addiu(scratch, scratch, -1); |
| 2845 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2846 bltzal(scratch, offset); |
| 2847 break; |
| 2848 case Uless: |
| 2849 Sltu(scratch, rs, rt); |
| 2850 addiu(scratch, scratch, -1); |
| 2851 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2852 bgezal(scratch, offset); |
| 2853 break; |
| 2854 case Uless_equal: |
| 2855 Sltu(scratch, GetRtAsRegisterHelper(rt, scratch), rs); |
| 2856 addiu(scratch, scratch, -1); |
| 2857 offset = GetOffset(offset, L, OffsetSize::kOffset16); |
| 2858 bltzal(scratch, offset); |
| 2859 break; |
| 2860 |
| 2861 default: |
| 2862 UNREACHABLE(); |
2772 } | 2863 } |
2773 | 2864 |
2774 // Emit a nop in the branch delay slot if required. | 2865 // Emit a nop in the branch delay slot if required. |
2775 if (bdslot == PROTECT) | 2866 if (bdslot == PROTECT) |
2776 nop(); | 2867 nop(); |
2777 } | 2868 |
2778 | 2869 return true; |
2779 | 2870 } |
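The comment above this helper spells out the idiom it relies on: slt/sltu leaves 0 or 1 in the scratch register, addiu ... -1 maps that to 0 or -1, and bgezal/bltzal then branches on the sign while storing the return address. A minimal sketch, not from the patch (a0/a1 and the word offset are arbitrary, scratch is t8), of the calls made for BranchAndLink(&L, greater, a0, Operand(a1)) with a PROTECT delay slot:

    slt(t8, a1, a0);       // t8 = 1 when a0 > a1, else 0
    addiu(t8, t8, -1);     // condition true -> 0, condition false -> -1
    bgezal(t8, offset);    // taken when t8 >= 0, i.e. when a0 > a1 (return address goes to ra)
    nop();                 // branch delay slot (PROTECT)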
2780 void MacroAssembler::BranchAndLinkShort(Label* L, BranchDelaySlot bdslot) { | 2871 |
2781 bal(shifted_branch_offset(L, false)); | 2872 |
2782 | 2873 bool MacroAssembler::BranchAndLinkShortCheck(int32_t offset, Label* L, |
2783 // Emit a nop in the branch delay slot if required. | 2874 Condition cond, Register rs, |
2784 if (bdslot == PROTECT) | 2875 const Operand& rt, |
2785 nop(); | 2876 BranchDelaySlot bdslot) { |
2786 } | |
2787 | |
2788 | |
2789 void MacroAssembler::BranchAndLinkShort(Label* L, Condition cond, Register rs, | |
2790 const Operand& rt, | |
2791 BranchDelaySlot bdslot) { | |
2792 BRANCH_ARGS_CHECK(cond, rs, rt); | 2877 BRANCH_ARGS_CHECK(cond, rs, rt); |
2793 | 2878 |
2794 int32_t offset = 0; | 2879 if (!L) { |
2795 Register r2 = no_reg; | 2880 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
2796 Register scratch = at; | 2881 DCHECK(is_int26(offset)); |
2797 if (rt.is_reg()) { | 2882 return BranchAndLinkShortHelperR6(offset, nullptr, cond, rs, rt); |
2798 r2 = rt.rm_; | 2883 } else { |
2799 } else if (cond != cc_always) { | 2884 DCHECK(is_int16(offset)); |
2800 r2 = scratch; | 2885 return BranchAndLinkShortHelper(offset, nullptr, cond, rs, rt, bdslot); |
2801 li(r2, rt); | |
2802 } | |
2803 | |
2804 if (!IsMipsArchVariant(kMips32r6)) { | |
2805 BlockTrampolinePoolScope block_trampoline_pool(this); | |
2806 switch (cond) { | |
2807 case cc_always: | |
2808 offset = shifted_branch_offset(L, false); | |
2809 bal(offset); | |
2810 break; | |
2811 case eq: | |
2812 bne(rs, r2, 2); | |
2813 nop(); | |
2814 offset = shifted_branch_offset(L, false); | |
2815 bal(offset); | |
2816 break; | |
2817 case ne: | |
2818 beq(rs, r2, 2); | |
2819 nop(); | |
2820 offset = shifted_branch_offset(L, false); | |
2821 bal(offset); | |
2822 break; | |
2823 | |
2824 // Signed comparison. | |
2825 case greater: | |
2826 slt(scratch, r2, rs); | |
2827 addiu(scratch, scratch, -1); | |
2828 offset = shifted_branch_offset(L, false); | |
2829 bgezal(scratch, offset); | |
2830 break; | |
2831 case greater_equal: | |
2832 slt(scratch, rs, r2); | |
2833 addiu(scratch, scratch, -1); | |
2834 offset = shifted_branch_offset(L, false); | |
2835 bltzal(scratch, offset); | |
2836 break; | |
2837 case less: | |
2838 slt(scratch, rs, r2); | |
2839 addiu(scratch, scratch, -1); | |
2840 offset = shifted_branch_offset(L, false); | |
2841 bgezal(scratch, offset); | |
2842 break; | |
2843 case less_equal: | |
2844 slt(scratch, r2, rs); | |
2845 addiu(scratch, scratch, -1); | |
2846 offset = shifted_branch_offset(L, false); | |
2847 bltzal(scratch, offset); | |
2848 break; | |
2849 | |
2850 // Unsigned comparison. | |
2851 case Ugreater: | |
2852 sltu(scratch, r2, rs); | |
2853 addiu(scratch, scratch, -1); | |
2854 offset = shifted_branch_offset(L, false); | |
2855 bgezal(scratch, offset); | |
2856 break; | |
2857 case Ugreater_equal: | |
2858 sltu(scratch, rs, r2); | |
2859 addiu(scratch, scratch, -1); | |
2860 offset = shifted_branch_offset(L, false); | |
2861 bltzal(scratch, offset); | |
2862 break; | |
2863 case Uless: | |
2864 sltu(scratch, rs, r2); | |
2865 addiu(scratch, scratch, -1); | |
2866 offset = shifted_branch_offset(L, false); | |
2867 bgezal(scratch, offset); | |
2868 break; | |
2869 case Uless_equal: | |
2870 sltu(scratch, r2, rs); | |
2871 addiu(scratch, scratch, -1); | |
2872 offset = shifted_branch_offset(L, false); | |
2873 bltzal(scratch, offset); | |
2874 break; | |
2875 | |
2876 default: | |
2877 UNREACHABLE(); | |
2878 } | 2886 } |
2879 } else { | 2887 } else { |
2880 BlockTrampolinePoolScope block_trampoline_pool(this); | 2888 DCHECK(offset == 0); |
2881 switch (cond) { | 2889 if (IsMipsArchVariant(kMips32r6) && bdslot == PROTECT) { |
2882 case cc_always: | 2890 return BranchAndLinkShortHelperR6(0, L, cond, rs, rt); |
2883 offset = shifted_branch_offset(L, false); | 2891 } else { |
2884 bal(offset); | 2892 return BranchAndLinkShortHelper(0, L, cond, rs, rt, bdslot); |
2885 break; | |
2886 case eq: | |
2887 bne(rs, r2, 2); | |
2888 nop(); | |
2889 offset = shifted_branch_offset(L, false); | |
2890 bal(offset); | |
2891 break; | |
2892 case ne: | |
2893 beq(rs, r2, 2); | |
2894 nop(); | |
2895 offset = shifted_branch_offset(L, false); | |
2896 bal(offset); | |
2897 break; | |
2898 | |
2899 // Signed comparison. | |
2900 case greater: | |
2901 // rs > rt | |
2902 slt(scratch, r2, rs); | |
2903 beq(scratch, zero_reg, 2); | |
2904 nop(); | |
2905 offset = shifted_branch_offset(L, false); | |
2906 bal(offset); | |
2907 break; | |
2908 case greater_equal: | |
2909 // rs >= rt | |
2910 slt(scratch, rs, r2); | |
2911 bne(scratch, zero_reg, 2); | |
2912 nop(); | |
2913 offset = shifted_branch_offset(L, false); | |
2914 bal(offset); | |
2915 break; | |
2916 case less: | |
2917 // rs < r2 | |
2918 slt(scratch, rs, r2); | |
2919 bne(scratch, zero_reg, 2); | |
2920 nop(); | |
2921 offset = shifted_branch_offset(L, false); | |
2922 bal(offset); | |
2923 break; | |
2924 case less_equal: | |
2925 // rs <= r2 | |
2926 slt(scratch, r2, rs); | |
2927 bne(scratch, zero_reg, 2); | |
2928 nop(); | |
2929 offset = shifted_branch_offset(L, false); | |
2930 bal(offset); | |
2931 break; | |
2932 | |
2933 | |
2934 // Unsigned comparison. | |
2935 case Ugreater: | |
2936 // rs > rt | |
2937 sltu(scratch, r2, rs); | |
2938 beq(scratch, zero_reg, 2); | |
2939 nop(); | |
2940 offset = shifted_branch_offset(L, false); | |
2941 bal(offset); | |
2942 break; | |
2943 case Ugreater_equal: | |
2944 // rs >= rt | |
2945 sltu(scratch, rs, r2); | |
2946 bne(scratch, zero_reg, 2); | |
2947 nop(); | |
2948 offset = shifted_branch_offset(L, false); | |
2949 bal(offset); | |
2950 break; | |
2951 case Uless: | |
2952 // rs < r2 | |
2953 sltu(scratch, rs, r2); | |
2954 bne(scratch, zero_reg, 2); | |
2955 nop(); | |
2956 offset = shifted_branch_offset(L, false); | |
2957 bal(offset); | |
2958 break; | |
2959 case Uless_equal: | |
2960 // rs <= r2 | |
2961 sltu(scratch, r2, rs); | |
2962 bne(scratch, zero_reg, 2); | |
2963 nop(); | |
2964 offset = shifted_branch_offset(L, false); | |
2965 bal(offset); | |
2966 break; | |
2967 | |
2968 default: | |
2969 UNREACHABLE(); | |
2970 } | 2893 } |
2971 } | 2894 } |
2972 | 2895 return false; |
2973 // Check that offset could actually fit in an int16_t. | 2896 } |
2974 DCHECK(is_int16(offset)); | 2897 |
2975 | 2898 |
2976 // Emit a nop in the branch delay slot if required. | |
2977 if (bdslot == PROTECT) | |
2978 nop(); | |
2979 } | |
2980 | |
2981 | |
2982 void MacroAssembler::Jump(Register target, | 2899 void MacroAssembler::Jump(Register target, |
2983 Condition cond, | 2900 Condition cond, |
2984 Register rs, | 2901 Register rs, |
2985 const Operand& rt, | 2902 const Operand& rt, |
2986 BranchDelaySlot bd) { | 2903 BranchDelaySlot bd) { |
2987 BlockTrampolinePoolScope block_trampoline_pool(this); | 2904 BlockTrampolinePoolScope block_trampoline_pool(this); |
2988 if (cond == cc_always) { | 2905 if (cond == cc_always) { |
2989 jr(target); | 2906 jr(target); |
2990 } else { | 2907 } else { |
2991 BRANCH_ARGS_CHECK(cond, rs, rt); | 2908 BRANCH_ARGS_CHECK(cond, rs, rt); |
(...skipping 66 matching lines...)
3058 return size * kInstrSize; | 2975 return size * kInstrSize; |
3059 } | 2976 } |
3060 | 2977 |
3061 | 2978 |
3062 // Note: To call gcc-compiled C code on mips, you must call thru t9. | 2979 // Note: To call gcc-compiled C code on mips, you must call thru t9. |
3063 void MacroAssembler::Call(Register target, | 2980 void MacroAssembler::Call(Register target, |
3064 Condition cond, | 2981 Condition cond, |
3065 Register rs, | 2982 Register rs, |
3066 const Operand& rt, | 2983 const Operand& rt, |
3067 BranchDelaySlot bd) { | 2984 BranchDelaySlot bd) { |
| 2985 #ifdef DEBUG |
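| // A compact branch immediately before this call (r6) gets one padding |
| // instruction to fill its forbidden slot, so account for it in the size |
| // check below. |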
| 2986 int size = IsPrevInstrCompactBranch() ? kInstrSize : 0; |
| 2987 #endif |
| 2988 |
3068 BlockTrampolinePoolScope block_trampoline_pool(this); | 2989 BlockTrampolinePoolScope block_trampoline_pool(this); |
3069 Label start; | 2990 Label start; |
3070 bind(&start); | 2991 bind(&start); |
3071 if (cond == cc_always) { | 2992 if (cond == cc_always) { |
3072 jalr(target); | 2993 jalr(target); |
3073 } else { | 2994 } else { |
3074 BRANCH_ARGS_CHECK(cond, rs, rt); | 2995 BRANCH_ARGS_CHECK(cond, rs, rt); |
3075 Branch(2, NegateCondition(cond), rs, rt); | 2996 Branch(2, NegateCondition(cond), rs, rt); |
3076 jalr(target); | 2997 jalr(target); |
3077 } | 2998 } |
3078 // Emit a nop in the branch delay slot if required. | 2999 // Emit a nop in the branch delay slot if required. |
3079 if (bd == PROTECT) | 3000 if (bd == PROTECT) |
3080 nop(); | 3001 nop(); |
3081 | 3002 |
3082 DCHECK_EQ(CallSize(target, cond, rs, rt, bd), | 3003 #ifdef DEBUG |
3083 SizeOfCodeGeneratedSince(&start)); | 3004 CHECK_EQ(size + CallSize(target, cond, rs, rt, bd), |
| 3005 SizeOfCodeGeneratedSince(&start)); |
| 3006 #endif |
3084 } | 3007 } |
3085 | 3008 |
3086 | 3009 |
3087 int MacroAssembler::CallSize(Address target, | 3010 int MacroAssembler::CallSize(Address target, |
3088 RelocInfo::Mode rmode, | 3011 RelocInfo::Mode rmode, |
3089 Condition cond, | 3012 Condition cond, |
3090 Register rs, | 3013 Register rs, |
3091 const Operand& rt, | 3014 const Operand& rt, |
3092 BranchDelaySlot bd) { | 3015 BranchDelaySlot bd) { |
3093 int size = CallSize(t9, cond, rs, rt, bd); | 3016 int size = CallSize(t9, cond, rs, rt, bd); |
(...skipping 2787 matching lines...)
5881 void CodePatcher::Emit(Instr instr) { | 5804 void CodePatcher::Emit(Instr instr) { |
5882 masm()->emit(instr); | 5805 masm()->emit(instr); |
5883 } | 5806 } |
5884 | 5807 |
5885 | 5808 |
5886 void CodePatcher::Emit(Address addr) { | 5809 void CodePatcher::Emit(Address addr) { |
5887 masm()->emit(reinterpret_cast<Instr>(addr)); | 5810 masm()->emit(reinterpret_cast<Instr>(addr)); |
5888 } | 5811 } |
5889 | 5812 |
5890 | 5813 |
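| // Patches the opcode field of |current_instr| with |new_opcode| and emits |
| // the result at the current patch position. |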
5891 void CodePatcher::ChangeBranchCondition(Condition cond) { | 5814 void CodePatcher::ChangeBranchCondition(Instr current_instr, |
5892 Instr instr = Assembler::instr_at(masm_.pc_); | 5815 uint32_t new_opcode) { |
5893 DCHECK(Assembler::IsBranch(instr)); | 5816 current_instr = (current_instr & ~kOpcodeMask) | new_opcode; |
5894 uint32_t opcode = Assembler::GetOpcodeField(instr); | 5817 masm_.emit(current_instr); |
5895 // Currently only the 'eq' and 'ne' cond values are supported and the simple | |
5896 // branch instructions (with opcode being the branch type). | |
5897 // There are some special cases (see Assembler::IsBranch()) so extending this | |
5898 // would be tricky. | |
5899 DCHECK(opcode == BEQ || | |
5900 opcode == BNE || | |
5901 opcode == BLEZ || | |
5902 opcode == BGTZ || | |
5903 opcode == BEQL || | |
5904 opcode == BNEL || | |
5905 opcode == BLEZL || | |
5906 opcode == BGTZL); | |
5907 opcode = (cond == eq) ? BEQ : BNE; | |
5908 instr = (instr & ~kOpcodeMask) | opcode; | |
5909 masm_.emit(instr); | |
5910 } | 5818 } |
5911 | 5819 |
5912 | 5820 |
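| // Computes result = dividend / divisor (truncated toward zero) via a |
| // magic-number multiply instead of a div instruction. |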
5913 void MacroAssembler::TruncatingDiv(Register result, | 5821 void MacroAssembler::TruncatingDiv(Register result, |
5914 Register dividend, | 5822 Register dividend, |
5915 int32_t divisor) { | 5823 int32_t divisor) { |
5916 DCHECK(!dividend.is(result)); | 5824 DCHECK(!dividend.is(result)); |
5917 DCHECK(!dividend.is(at)); | 5825 DCHECK(!dividend.is(at)); |
5918 DCHECK(!result.is(at)); | 5826 DCHECK(!result.is(at)); |
5919 base::MagicNumbersForDivision<uint32_t> mag = | 5827 base::MagicNumbersForDivision<uint32_t> mag = |
(...skipping 10 matching lines...)
5930 if (mag.shift > 0) sra(result, result, mag.shift); | 5838 if (mag.shift > 0) sra(result, result, mag.shift); |
5931 srl(at, dividend, 31); | 5839 srl(at, dividend, 31); |
5932 Addu(result, result, Operand(at)); | 5840 Addu(result, result, Operand(at)); |
5933 } | 5841 } |
5934 | 5842 |
5935 | 5843 |
5936 } // namespace internal | 5844 } // namespace internal |
5937 } // namespace v8 | 5845 } // namespace v8 |
5938 | 5846 |
5939 #endif // V8_TARGET_ARCH_MIPS | 5847 #endif // V8_TARGET_ARCH_MIPS |