Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/compiler/code-generator.h" | 5 #include "src/compiler/code-generator.h" |
| 6 | 6 |
| 7 #include "src/arm64/assembler-arm64-inl.h" | 7 #include "src/arm64/assembler-arm64-inl.h" |
| 8 #include "src/arm64/frames-arm64.h" | 8 #include "src/arm64/frames-arm64.h" |
| 9 #include "src/arm64/macro-assembler-arm64-inl.h" | 9 #include "src/arm64/macro-assembler-arm64-inl.h" |
| 10 #include "src/compilation-info.h" | 10 #include "src/compilation-info.h" |
| (...skipping 2211 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 2222 } | 2222 } |
| 2223 } | 2223 } |
| 2224 } | 2224 } |
| 2225 } else if (source->IsFPRegister()) { | 2225 } else if (source->IsFPRegister()) { |
| 2226 VRegister src = g.ToDoubleRegister(source); | 2226 VRegister src = g.ToDoubleRegister(source); |
| 2227 if (destination->IsFPRegister()) { | 2227 if (destination->IsFPRegister()) { |
| 2228 VRegister dst = g.ToDoubleRegister(destination); | 2228 VRegister dst = g.ToDoubleRegister(destination); |
| 2229 __ Fmov(dst, src); | 2229 __ Fmov(dst, src); |
| 2230 } else { | 2230 } else { |
| 2231 DCHECK(destination->IsFPStackSlot()); | 2231 DCHECK(destination->IsFPStackSlot()); |
| 2232 __ Str(src, g.ToMemOperand(destination, masm())); | 2232 MemOperand dst = g.ToMemOperand(destination, masm()); |
| 2233 if (!destination->IsSimd128StackSlot()) { | |
| 2234 __ Str(src, dst); | |
| 2235 } else { | |
| 2236 __ st1(src, dst); | |
|
martyn.capewell
2017/06/08 08:29:13
I'm not sure this will work - I think ToDoubleRegister… [comment truncated in capture]
bbudge
2017/06/08 20:16:59
I forgot that VRegisters have a size. I think your… [comment truncated in capture]
| |
| 2237 } | |
| 2233 } | 2238 } |
| 2234 } else if (source->IsFPStackSlot()) { | 2239 } else if (source->IsFPStackSlot()) { |
| 2235 DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot()); | 2240 DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot()); |
| 2236 MemOperand src = g.ToMemOperand(source, masm()); | 2241 MemOperand src = g.ToMemOperand(source, masm()); |
| 2237 if (destination->IsFPRegister()) { | 2242 if (destination->IsFPRegister()) { |
| 2238 __ Ldr(g.ToDoubleRegister(destination), src); | 2243 VRegister dst = g.ToDoubleRegister(destination); |
| 2244 if (!destination->IsSimd128Register()) { | |
| 2245 __ Ldr(dst, src); | |
| 2246 } else { | |
| 2247 __ ld1(dst, src); | |
| 2248 } | |
| 2239 } else { | 2249 } else { |
| 2240 UseScratchRegisterScope scope(masm()); | 2250 UseScratchRegisterScope scope(masm()); |
| 2241 VRegister temp = scope.AcquireD(); | 2251 VRegister temp = scope.AcquireD(); |
| 2242 __ Ldr(temp, src); | 2252 MemOperand dst = g.ToMemOperand(destination, masm()); |
| 2243 __ Str(temp, g.ToMemOperand(destination, masm())); | 2253 if (!destination->IsSimd128StackSlot()) { |
| 2254 __ Ldr(temp, src); | |
| 2255 __ Str(temp, dst); | |
| 2256 } else { | |
| 2257 __ ld1(temp, src); | |
| 2258 __ st1(temp, dst); | |
| 2259 } | |
| 2244 } | 2260 } |
| 2245 } else { | 2261 } else { |
| 2246 UNREACHABLE(); | 2262 UNREACHABLE(); |
| 2247 } | 2263 } |
| 2248 } | 2264 } |
| 2249 | 2265 |
| 2250 | 2266 |
| 2251 void CodeGenerator::AssembleSwap(InstructionOperand* source, | 2267 void CodeGenerator::AssembleSwap(InstructionOperand* source, |
| 2252 InstructionOperand* destination) { | 2268 InstructionOperand* destination) { |
| 2253 Arm64OperandConverter g(this, nullptr); | 2269 Arm64OperandConverter g(this, nullptr); |
| (...skipping 11 matching lines...) Expand all Loading... | |
| 2265 __ Mov(dst, temp); | 2281 __ Mov(dst, temp); |
| 2266 } else { | 2282 } else { |
| 2267 DCHECK(destination->IsStackSlot()); | 2283 DCHECK(destination->IsStackSlot()); |
| 2268 MemOperand dst = g.ToMemOperand(destination, masm()); | 2284 MemOperand dst = g.ToMemOperand(destination, masm()); |
| 2269 __ Mov(temp, src); | 2285 __ Mov(temp, src); |
| 2270 __ Ldr(src, dst); | 2286 __ Ldr(src, dst); |
| 2271 __ Str(temp, dst); | 2287 __ Str(temp, dst); |
| 2272 } | 2288 } |
| 2273 } else if (source->IsStackSlot() || source->IsFPStackSlot()) { | 2289 } else if (source->IsStackSlot() || source->IsFPStackSlot()) { |
| 2274 UseScratchRegisterScope scope(masm()); | 2290 UseScratchRegisterScope scope(masm()); |
| 2275 DoubleRegister temp_0 = scope.AcquireD(); | 2291 VRegister temp_0 = scope.AcquireD(); |
| 2276 DoubleRegister temp_1 = scope.AcquireD(); | 2292 VRegister temp_1 = scope.AcquireD(); |
| 2277 MemOperand src = g.ToMemOperand(source, masm()); | 2293 MemOperand src = g.ToMemOperand(source, masm()); |
| 2278 MemOperand dst = g.ToMemOperand(destination, masm()); | 2294 MemOperand dst = g.ToMemOperand(destination, masm()); |
| 2279 __ Ldr(temp_0, src); | 2295 if (!source->IsSimd128StackSlot()) { |
| 2280 __ Ldr(temp_1, dst); | 2296 __ Ldr(temp_0, src); |
| 2281 __ Str(temp_0, dst); | 2297 __ Ldr(temp_1, dst); |
| 2282 __ Str(temp_1, src); | 2298 __ Str(temp_0, dst); |
| 2299 __ Str(temp_1, src); | |
| 2300 } else { | |
| 2301 __ ld1(temp_0, src); | |
| 2302 __ ld1(temp_1, dst); | |
| 2303 __ st1(temp_0, dst); | |
| 2304 __ st1(temp_1, src); | |
| 2305 } | |
| 2283 } else if (source->IsFPRegister()) { | 2306 } else if (source->IsFPRegister()) { |
| 2284 UseScratchRegisterScope scope(masm()); | 2307 UseScratchRegisterScope scope(masm()); |
| 2285 VRegister temp = scope.AcquireD(); | 2308 VRegister temp = scope.AcquireD(); |
| 2286 VRegister src = g.ToDoubleRegister(source); | 2309 VRegister src = g.ToDoubleRegister(source); |
| 2287 if (destination->IsFPRegister()) { | 2310 if (destination->IsFPRegister()) { |
| 2288 VRegister dst = g.ToDoubleRegister(destination); | 2311 VRegister dst = g.ToDoubleRegister(destination); |
| 2289 __ Fmov(temp, src); | 2312 __ Fmov(temp, src); |
| 2290 __ Fmov(src, dst); | 2313 __ Fmov(src, dst); |
| 2291 __ Fmov(dst, temp); | 2314 __ Fmov(dst, temp); |
| 2292 } else { | 2315 } else { |
| 2293 DCHECK(destination->IsFPStackSlot()); | 2316 DCHECK(destination->IsFPStackSlot()); |
| 2294 MemOperand dst = g.ToMemOperand(destination, masm()); | 2317 MemOperand dst = g.ToMemOperand(destination, masm()); |
| 2295 __ Fmov(temp, src); | 2318 if (!source->IsSimd128Register()) { |
| 2296 __ Ldr(src, dst); | 2319 __ Fmov(temp, src); |
| 2297 __ Str(temp, dst); | 2320 __ Ldr(src, dst); |
| 2321 __ Str(temp, dst); | |
| 2322 } else { | |
| 2323 __ Fmov(temp, src); | |
| 2324 __ ld1(src, dst); | |
| 2325 __ st1(temp, dst); | |
| 2326 } | |
| 2298 } | 2327 } |
| 2299 } else { | 2328 } else { |
| 2300 // No other combinations are possible. | 2329 // No other combinations are possible. |
| 2301 UNREACHABLE(); | 2330 UNREACHABLE(); |
| 2302 } | 2331 } |
| 2303 } | 2332 } |
| 2304 | 2333 |
| 2305 | 2334 |
| 2306 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { | 2335 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { |
| 2307 // On 64-bit ARM we emit the jump tables inline. | 2336 // On 64-bit ARM we emit the jump tables inline. |
| (...skipping 22 matching lines...) Expand all Loading... | |
| 2330 padding_size -= kInstructionSize; | 2359 padding_size -= kInstructionSize; |
| 2331 } | 2360 } |
| 2332 } | 2361 } |
| 2333 } | 2362 } |
| 2334 | 2363 |
| 2335 #undef __ | 2364 #undef __ |
| 2336 | 2365 |
| 2337 } // namespace compiler | 2366 } // namespace compiler |
| 2338 } // namespace internal | 2367 } // namespace internal |
| 2339 } // namespace v8 | 2368 } // namespace v8 |
| OLD | NEW |