| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "v8.h" | 5 #include "v8.h" |
| 6 | 6 |
| 7 #if V8_TARGET_ARCH_IA32 | 7 #if V8_TARGET_ARCH_X87 |
| 8 | 8 |
| 9 #include "ia32/lithium-gap-resolver-ia32.h" | 9 #include "x87/lithium-gap-resolver-x87.h" |
| 10 #include "ia32/lithium-codegen-ia32.h" | 10 #include "x87/lithium-codegen-x87.h" |
| 11 | 11 |
| 12 namespace v8 { | 12 namespace v8 { |
| 13 namespace internal { | 13 namespace internal { |
| 14 | 14 |
| 15 LGapResolver::LGapResolver(LCodeGen* owner) | 15 LGapResolver::LGapResolver(LCodeGen* owner) |
| 16 : cgen_(owner), | 16 : cgen_(owner), |
| 17 moves_(32, owner->zone()), | 17 moves_(32, owner->zone()), |
| 18 source_uses_(), | 18 source_uses_(), |
| 19 destination_uses_(), | 19 destination_uses_(), |
| 20 spilled_register_(-1) {} | 20 spilled_register_(-1) {} |
| (...skipping 267 matching lines...) |
| 288 if (cgen_->IsInteger32(constant_source)) { | 288 if (cgen_->IsInteger32(constant_source)) { |
| 289 __ Move(dst, cgen_->ToImmediate(constant_source, r)); | 289 __ Move(dst, cgen_->ToImmediate(constant_source, r)); |
| 290 } else { | 290 } else { |
| 291 __ LoadObject(dst, cgen_->ToHandle(constant_source)); | 291 __ LoadObject(dst, cgen_->ToHandle(constant_source)); |
| 292 } | 292 } |
| 293 } else if (destination->IsDoubleRegister()) { | 293 } else if (destination->IsDoubleRegister()) { |
| 294 double v = cgen_->ToDouble(constant_source); | 294 double v = cgen_->ToDouble(constant_source); |
| 295 uint64_t int_val = BitCast<uint64_t, double>(v); | 295 uint64_t int_val = BitCast<uint64_t, double>(v); |
| 296 int32_t lower = static_cast<int32_t>(int_val); | 296 int32_t lower = static_cast<int32_t>(int_val); |
| 297 int32_t upper = static_cast<int32_t>(int_val >> kBitsPerInt); | 297 int32_t upper = static_cast<int32_t>(int_val >> kBitsPerInt); |
| 298 XMMRegister dst = cgen_->ToDoubleRegister(destination); | 298 __ push(Immediate(upper)); |
| 299 if (int_val == 0) { | 299 __ push(Immediate(lower)); |
| 300 __ xorps(dst, dst); | 300 X87Register dst = cgen_->ToX87Register(destination); |
| 301 } else { | 301 cgen_->X87Mov(dst, MemOperand(esp, 0)); |
| 302 __ push(Immediate(upper)); | 302 __ add(esp, Immediate(kDoubleSize)); |
| 303 __ push(Immediate(lower)); | |
| 304 __ movsd(dst, Operand(esp, 0)); | |
| 305 __ add(esp, Immediate(kDoubleSize)); | |
| 306 } | |
| 307 } else { | 303 } else { |
| 308 ASSERT(destination->IsStackSlot()); | 304 ASSERT(destination->IsStackSlot()); |
| 309 Operand dst = cgen_->ToOperand(destination); | 305 Operand dst = cgen_->ToOperand(destination); |
| 310 Representation r = cgen_->IsSmi(constant_source) | 306 Representation r = cgen_->IsSmi(constant_source) |
| 311 ? Representation::Smi() : Representation::Integer32(); | 307 ? Representation::Smi() : Representation::Integer32(); |
| 312 if (cgen_->IsInteger32(constant_source)) { | 308 if (cgen_->IsInteger32(constant_source)) { |
| 313 __ Move(dst, cgen_->ToImmediate(constant_source, r)); | 309 __ Move(dst, cgen_->ToImmediate(constant_source, r)); |
| 314 } else { | 310 } else { |
| 315 Register tmp = EnsureTempRegister(); | 311 Register tmp = EnsureTempRegister(); |
| 316 __ LoadObject(tmp, cgen_->ToHandle(constant_source)); | 312 __ LoadObject(tmp, cgen_->ToHandle(constant_source)); |
| 317 __ mov(dst, tmp); | 313 __ mov(dst, tmp); |
| 318 } | 314 } |
| 319 } | 315 } |
| 320 | 316 |
| 321 } else if (source->IsDoubleRegister()) { | 317 } else if (source->IsDoubleRegister()) { |
| 322 XMMRegister src = cgen_->ToDoubleRegister(source); | 318 // Load the register onto the FPU stack, then store it in the destination, |
| 323 if (destination->IsDoubleRegister()) { | 319 // which must be a double stack slot in the non-SSE2 case. |
| 324 XMMRegister dst = cgen_->ToDoubleRegister(destination); | 320 ASSERT(destination->IsDoubleStackSlot()); |
| 325 __ movaps(dst, src); | 321 Operand dst = cgen_->ToOperand(destination); |
| 322 X87Register src = cgen_->ToX87Register(source); |
| 323 cgen_->X87Mov(dst, src); |
| 324 } else if (source->IsDoubleStackSlot()) { |
| 325 // Load the value from the stack slot onto the top of the floating-point |
| 326 // stack, then store it in the destination. If the destination is a double |
| 327 // register, it represents the top of the stack and nothing needs to be done. |
| 328 if (destination->IsDoubleStackSlot()) { |
| 329 Register tmp = EnsureTempRegister(); |
| 330 Operand src0 = cgen_->ToOperand(source); |
| 331 Operand src1 = cgen_->HighOperand(source); |
| 332 Operand dst0 = cgen_->ToOperand(destination); |
| 333 Operand dst1 = cgen_->HighOperand(destination); |
| 334 __ mov(tmp, src0); // Use tmp to copy the source to the destination. |
| 335 __ mov(dst0, tmp); |
| 336 __ mov(tmp, src1); |
| 337 __ mov(dst1, tmp); |
| 326 } else { | 338 } else { |
| 327 ASSERT(destination->IsDoubleStackSlot()); | 339 Operand src = cgen_->ToOperand(source); |
| 328 Operand dst = cgen_->ToOperand(destination); | 340 X87Register dst = cgen_->ToX87Register(destination); |
| 329 __ movsd(dst, src); | 341 cgen_->X87Mov(dst, src); |
| 330 } | |
| 331 } else if (source->IsDoubleStackSlot()) { | |
| 332 ASSERT(destination->IsDoubleRegister() || | |
| 333 destination->IsDoubleStackSlot()); | |
| 334 Operand src = cgen_->ToOperand(source); | |
| 335 if (destination->IsDoubleRegister()) { | |
| 336 XMMRegister dst = cgen_->ToDoubleRegister(destination); | |
| 337 __ movsd(dst, src); | |
| 338 } else { | |
| 339 // We rely on having xmm0 available as a fixed scratch register. | |
| 340 Operand dst = cgen_->ToOperand(destination); | |
| 341 __ movsd(xmm0, src); | |
| 342 __ movsd(dst, xmm0); | |
| 343 } | 342 } |
| 344 } else { | 343 } else { |
| 345 UNREACHABLE(); | 344 UNREACHABLE(); |
| 346 } | 345 } |
| 347 | 346 |
| 348 RemoveMove(index); | 347 RemoveMove(index); |
| 349 } | 348 } |
| 350 | 349 |
| 351 | 350 |
| 352 void LGapResolver::EmitSwap(int index) { | 351 void LGapResolver::EmitSwap(int index) { |
| (...skipping 43 matching lines...) |
| 396 __ xor_(tmp0, src); | 395 __ xor_(tmp0, src); |
| 397 __ xor_(src, tmp0); | 396 __ xor_(src, tmp0); |
| 398 __ xor_(tmp0, src); | 397 __ xor_(tmp0, src); |
| 399 __ mov(dst, tmp0); | 398 __ mov(dst, tmp0); |
| 400 } else { | 399 } else { |
| 401 __ mov(tmp0, dst); | 400 __ mov(tmp0, dst); |
| 402 __ mov(tmp1, src); | 401 __ mov(tmp1, src); |
| 403 __ mov(dst, tmp1); | 402 __ mov(dst, tmp1); |
| 404 __ mov(src, tmp0); | 403 __ mov(src, tmp0); |
| 405 } | 404 } |
| 406 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) { | |
| 407 // XMM register-register swap. We rely on having xmm0 | |
| 408 // available as a fixed scratch register. | |
| 409 XMMRegister src = cgen_->ToDoubleRegister(source); | |
| 410 XMMRegister dst = cgen_->ToDoubleRegister(destination); | |
| 411 __ movaps(xmm0, src); | |
| 412 __ movaps(src, dst); | |
| 413 __ movaps(dst, xmm0); | |
| 414 } else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) { | |
| 415 // XMM register-memory swap. We rely on having xmm0 | |
| 416 // available as a fixed scratch register. | |
| 417 ASSERT(source->IsDoubleStackSlot() || destination->IsDoubleStackSlot()); | |
| 418 XMMRegister reg = cgen_->ToDoubleRegister(source->IsDoubleRegister() | |
| 419 ? source | |
| 420 : destination); | |
| 421 Operand other = | |
| 422 cgen_->ToOperand(source->IsDoubleRegister() ? destination : source); | |
| 423 __ movsd(xmm0, other); | |
| 424 __ movsd(other, reg); | |
| 425 __ movaps(reg, xmm0); | |
| 426 } else if (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot()) { | |
| 427 // Double-width memory-to-memory. Spill on demand to use a general | |
| 428 // purpose temporary register and also rely on having xmm0 available as | |
| 429 // a fixed scratch register. | |
| 430 Register tmp = EnsureTempRegister(); | |
| 431 Operand src0 = cgen_->ToOperand(source); | |
| 432 Operand src1 = cgen_->HighOperand(source); | |
| 433 Operand dst0 = cgen_->ToOperand(destination); | |
| 434 Operand dst1 = cgen_->HighOperand(destination); | |
| 435 __ movsd(xmm0, dst0); // Save destination in xmm0. | |
| 436 __ mov(tmp, src0); // Then use tmp to copy source to destination. | |
| 437 __ mov(dst0, tmp); | |
| 438 __ mov(tmp, src1); | |
| 439 __ mov(dst1, tmp); | |
| 440 __ movsd(src0, xmm0); | |
| 441 | |
| 442 } else { | 405 } else { |
| 443 // No other combinations are possible. | 406 // No other combinations are possible. |
| 444 UNREACHABLE(); | 407 UNREACHABLE(); |
| 445 } | 408 } |
| 446 | 409 |
| 447 // The swap of source and destination has executed a move from source to | 410 // The swap of source and destination has executed a move from source to |
| 448 // destination. | 411 // destination. |
| 449 RemoveMove(index); | 412 RemoveMove(index); |
| 450 | 413 |
| 451 // Any unperformed (including pending) move with a source of either | 414 // Any unperformed (including pending) move with a source of either |
| (...skipping 20 matching lines...) |
| 472 source_uses_[source->index()] = CountSourceUses(source); | 435 source_uses_[source->index()] = CountSourceUses(source); |
| 473 } else if (destination->IsRegister()) { | 436 } else if (destination->IsRegister()) { |
| 474 source_uses_[destination->index()] = CountSourceUses(destination); | 437 source_uses_[destination->index()] = CountSourceUses(destination); |
| 475 } | 438 } |
| 476 } | 439 } |
| 477 | 440 |
| 478 #undef __ | 441 #undef __ |
| 479 | 442 |
| 480 } } // namespace v8::internal | 443 } } // namespace v8::internal |
| 481 | 444 |
| 482 #endif // V8_TARGET_ARCH_IA32 | 445 #endif // V8_TARGET_ARCH_X87 |
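Note on the constant-to-double-register path in EmitMove above: without SSE2, the double constant is materialized by pushing its upper 32 bits and then its lower 32 bits, so the low word ends up at esp+0 and the high word at esp+4. That is the little-endian in-memory layout the following X87Mov from MemOperand(esp, 0) expects; esp is then bumped by kDoubleSize to drop the temporary slot. A minimal standalone sketch of the bit split (illustrative only, not part of the patch; std::memcpy stands in for V8's BitCast):

```cpp
// Split a double into the two 32-bit halves that EmitMove pushes.
// Assumes a little-endian x86 target.
#include <cstdint>
#include <cstring>

struct DoubleHalves {
  int32_t lower;  // Bits 0..31; pushed second, so it lands at esp+0.
  int32_t upper;  // Bits 32..63; pushed first, so it lands at esp+4.
};

DoubleHalves SplitDouble(double v) {
  uint64_t bits;
  std::memcpy(&bits, &v, sizeof bits);  // Reinterpret the bit pattern.
  DoubleHalves halves;
  halves.lower = static_cast<int32_t>(bits);
  halves.upper = static_cast<int32_t>(bits >> 32);
  return halves;
}
```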
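Likewise, the double-stack-slot-to-double-stack-slot move no longer goes through xmm0 as a scratch register; it copies the 64-bit slot through one general-purpose temporary in two 32-bit halves (ToOperand for the low word, HighOperand for the high word). A hedged C++ illustration of that copy pattern (the struct and function names are invented for this sketch):

```cpp
// Illustrative only: copy a 64-bit double slot through a 32-bit temporary,
// mirroring the mov(tmp, src0); mov(dst0, tmp); mov(tmp, src1); mov(dst1, tmp)
// sequence in EmitMove above.
#include <cstdint>

struct DoubleSlot {
  uint32_t low;   // Plays the role of ToOperand(slot).
  uint32_t high;  // Plays the role of HighOperand(slot).
};

void CopyDoubleSlot(const DoubleSlot& src, DoubleSlot* dst) {
  uint32_t tmp = src.low;   // __ mov(tmp, src0)
  dst->low = tmp;           // __ mov(dst0, tmp)
  tmp = src.high;           // __ mov(tmp, src1)
  dst->high = tmp;          // __ mov(dst1, tmp)
}
```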