OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 306 matching lines...) |
317 if (cgen_->IsInteger32(constant_source)) { | 317 if (cgen_->IsInteger32(constant_source)) { |
318 __ Set(dst, cgen_->ToInteger32Immediate(constant_source)); | 318 __ Set(dst, cgen_->ToInteger32Immediate(constant_source)); |
319 } else { | 319 } else { |
320 Register tmp = EnsureTempRegister(); | 320 Register tmp = EnsureTempRegister(); |
321 __ LoadObject(tmp, cgen_->ToHandle(constant_source)); | 321 __ LoadObject(tmp, cgen_->ToHandle(constant_source)); |
322 __ mov(dst, tmp); | 322 __ mov(dst, tmp); |
323 } | 323 } |
324 } | 324 } |
325 | 325 |
326 } else if (source->IsDoubleRegister()) { | 326 } else if (source->IsDoubleRegister()) { |
327 CpuFeatureScope scope(cgen_->masm(), SSE2); | 327 if (CpuFeatures::IsSupported(SSE2)) { |
328 XMMRegister src = cgen_->ToDoubleRegister(source); | 328 CpuFeatureScope scope(cgen_->masm(), SSE2); |
329 if (destination->IsDoubleRegister()) { | 329 XMMRegister src = cgen_->ToDoubleRegister(source); |
330 XMMRegister dst = cgen_->ToDoubleRegister(destination); | 330 if (destination->IsDoubleRegister()) { |
331 __ movaps(dst, src); | 331 XMMRegister dst = cgen_->ToDoubleRegister(destination); |
| 332 __ movaps(dst, src); |
| 333 } else { |
| 334 ASSERT(destination->IsDoubleStackSlot()); |
| 335 Operand dst = cgen_->ToOperand(destination); |
| 336 __ movdbl(dst, src); |
| 337 } |
332 } else { | 338 } else { |
 | 339 // Without SSE2, the source "register" is the top of the X87 stack; store |
 | 340 // it to the destination, which must be a double stack slot. |
 | 341 ASSERT(source->index() == 0);  // The source is the top of the X87 stack. |
333 ASSERT(destination->IsDoubleStackSlot()); | 342 ASSERT(destination->IsDoubleStackSlot()); |
334 Operand dst = cgen_->ToOperand(destination); | 343 Operand dst = cgen_->ToOperand(destination); |
335 __ movdbl(dst, src); | 344 cgen_->ReadX87Operand(dst); |
336 } | 345 } |
337 } else if (source->IsDoubleStackSlot()) { | 346 } else if (source->IsDoubleStackSlot()) { |
338 CpuFeatureScope scope(cgen_->masm(), SSE2); | 347 if (CpuFeatures::IsSupported(SSE2)) { |
339 ASSERT(destination->IsDoubleRegister() || | 348 CpuFeatureScope scope(cgen_->masm(), SSE2); |
340 destination->IsDoubleStackSlot()); | 349 ASSERT(destination->IsDoubleRegister() || |
341 Operand src = cgen_->ToOperand(source); | 350 destination->IsDoubleStackSlot()); |
342 if (destination->IsDoubleRegister()) { | 351 Operand src = cgen_->ToOperand(source); |
343 XMMRegister dst = cgen_->ToDoubleRegister(destination); | 352 if (destination->IsDoubleRegister()) { |
344 __ movdbl(dst, src); | 353 XMMRegister dst = cgen_->ToDoubleRegister(destination); |
| 354 __ movdbl(dst, src); |
| 355 } else { |
| 356 // We rely on having xmm0 available as a fixed scratch register. |
| 357 Operand dst = cgen_->ToOperand(destination); |
| 358 __ movdbl(xmm0, src); |
| 359 __ movdbl(dst, xmm0); |
| 360 } |
345 } else { | 361 } else { |
346 // We rely on having xmm0 available as a fixed scratch register. | 362 // Without SSE2, copy a double stack slot to another stack slot word by |
347 Operand dst = cgen_->ToOperand(destination); | 363 // word through a temp register. If the destination is a double register, |
348 __ movdbl(xmm0, src); | 364 // it is the top of the X87 stack, so push the source value onto it. |
349 __ movdbl(dst, xmm0); | 365 if (destination->IsDoubleStackSlot()) { |
| 366 Register tmp = EnsureTempRegister(); |
| 367 Operand src0 = cgen_->ToOperand(source); |
| 368 Operand src1 = cgen_->HighOperand(source); |
| 369 Operand dst0 = cgen_->ToOperand(destination); |
| 370 Operand dst1 = cgen_->HighOperand(destination); |
 | 371 __ mov(tmp, src0);  // Copy source to destination via tmp, one word at a time. |
| 372 __ mov(dst0, tmp); |
| 373 __ mov(tmp, src1); |
| 374 __ mov(dst1, tmp); |
| 375 } else { |
| 376 Operand src = cgen_->ToOperand(source); |
| 377 if (cgen_->X87StackNonEmpty()) { |
| 378 cgen_->PopX87(); |
| 379 } |
| 380 cgen_->PushX87DoubleOperand(src); |
| 381 } |
350 } | 382 } |
351 } else { | 383 } else { |
352 UNREACHABLE(); | 384 UNREACHABLE(); |
353 } | 385 } |
354 | 386 |
355 RemoveMove(index); | 387 RemoveMove(index); |
356 } | 388 } |
357 | 389 |
358 | 390 |
359 void LGapResolver::EmitSwap(int index) { | 391 void LGapResolver::EmitSwap(int index) { |
(...skipping 52 matching lines...) |
412 } | 444 } |
413 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) { | 445 } else if (source->IsDoubleRegister() && destination->IsDoubleRegister()) { |
414 CpuFeatureScope scope(cgen_->masm(), SSE2); | 446 CpuFeatureScope scope(cgen_->masm(), SSE2); |
415 // XMM register-register swap. We rely on having xmm0 | 447 // XMM register-register swap. We rely on having xmm0 |
416 // available as a fixed scratch register. | 448 // available as a fixed scratch register. |
417 XMMRegister src = cgen_->ToDoubleRegister(source); | 449 XMMRegister src = cgen_->ToDoubleRegister(source); |
418 XMMRegister dst = cgen_->ToDoubleRegister(destination); | 450 XMMRegister dst = cgen_->ToDoubleRegister(destination); |
419 __ movaps(xmm0, src); | 451 __ movaps(xmm0, src); |
420 __ movaps(src, dst); | 452 __ movaps(src, dst); |
421 __ movaps(dst, xmm0); | 453 __ movaps(dst, xmm0); |
422 | |
423 } else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) { | 454 } else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) { |
424 CpuFeatureScope scope(cgen_->masm(), SSE2); | 455 CpuFeatureScope scope(cgen_->masm(), SSE2); |
425 // XMM register-memory swap. We rely on having xmm0 | 456 // XMM register-memory swap. We rely on having xmm0 |
426 // available as a fixed scratch register. | 457 // available as a fixed scratch register. |
427 ASSERT(source->IsDoubleStackSlot() || destination->IsDoubleStackSlot()); | 458 ASSERT(source->IsDoubleStackSlot() || destination->IsDoubleStackSlot()); |
428 XMMRegister reg = cgen_->ToDoubleRegister(source->IsDoubleRegister() | 459 XMMRegister reg = cgen_->ToDoubleRegister(source->IsDoubleRegister() |
429 ? source | 460 ? source |
430 : destination); | 461 : destination); |
431 Operand other = | 462 Operand other = |
432 cgen_->ToOperand(source->IsDoubleRegister() ? destination : source); | 463 cgen_->ToOperand(source->IsDoubleRegister() ? destination : source); |
433 __ movdbl(xmm0, other); | 464 __ movdbl(xmm0, other); |
434 __ movdbl(other, reg); | 465 __ movdbl(other, reg); |
435 __ movdbl(reg, Operand(xmm0)); | 466 __ movdbl(reg, Operand(xmm0)); |
436 | |
437 } else if (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot()) { | 467 } else if (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot()) { |
438 CpuFeatureScope scope(cgen_->masm(), SSE2); | 468 CpuFeatureScope scope(cgen_->masm(), SSE2); |
439 // Double-width memory-to-memory. Spill on demand to use a general | 469 // Double-width memory-to-memory. Spill on demand to use a general |
440 // purpose temporary register and also rely on having xmm0 available as | 470 // purpose temporary register and also rely on having xmm0 available as |
441 // a fixed scratch register. | 471 // a fixed scratch register. |
442 Register tmp = EnsureTempRegister(); | 472 Register tmp = EnsureTempRegister(); |
443 Operand src0 = cgen_->ToOperand(source); | 473 Operand src0 = cgen_->ToOperand(source); |
444 Operand src1 = cgen_->HighOperand(source); | 474 Operand src1 = cgen_->HighOperand(source); |
445 Operand dst0 = cgen_->ToOperand(destination); | 475 Operand dst0 = cgen_->ToOperand(destination); |
446 Operand dst1 = cgen_->HighOperand(destination); | 476 Operand dst1 = cgen_->HighOperand(destination); |
(...skipping 38 matching lines...) |
485 } else if (destination->IsRegister()) { | 515 } else if (destination->IsRegister()) { |
486 source_uses_[destination->index()] = CountSourceUses(destination); | 516 source_uses_[destination->index()] = CountSourceUses(destination); |
487 } | 517 } |
488 } | 518 } |
489 | 519 |
490 #undef __ | 520 #undef __ |
491 | 521 |
492 } } // namespace v8::internal | 522 } } // namespace v8::internal |
493 | 523 |
494 #endif // V8_TARGET_ARCH_IA32 | 524 #endif // V8_TARGET_ARCH_IA32 |