OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 306 matching lines...)
317 if (cgen_->IsInteger32(constant_source)) { | 317 if (cgen_->IsInteger32(constant_source)) { |
318 __ Set(dst, cgen_->ToInteger32Immediate(constant_source)); | 318 __ Set(dst, cgen_->ToInteger32Immediate(constant_source)); |
319 } else { | 319 } else { |
320 Register tmp = EnsureTempRegister(); | 320 Register tmp = EnsureTempRegister(); |
321 __ LoadObject(tmp, cgen_->ToHandle(constant_source)); | 321 __ LoadObject(tmp, cgen_->ToHandle(constant_source)); |
322 __ mov(dst, tmp); | 322 __ mov(dst, tmp); |
323 } | 323 } |
324 } | 324 } |
325 | 325 |
326 } else if (source->IsDoubleRegister()) { | 326 } else if (source->IsDoubleRegister()) { |
327 if (CpuFeatures::IsSupported(SSE2)) { | 327 CpuFeatures::Scope scope(SSE2); |
328 CpuFeatures::Scope scope(SSE2); | 328 XMMRegister src = cgen_->ToDoubleRegister(source); |
329 XMMRegister src = cgen_->ToDoubleRegister(source); | 329 if (destination->IsDoubleRegister()) { |
330 if (destination->IsDoubleRegister()) { | 330 XMMRegister dst = cgen_->ToDoubleRegister(destination); |
331 XMMRegister dst = cgen_->ToDoubleRegister(destination); | 331 __ movaps(dst, src); |
332 __ movaps(dst, src); | |
333 } else { | |
334 ASSERT(destination->IsDoubleStackSlot()); | |
335 Operand dst = cgen_->ToOperand(destination); | |
336 __ movdbl(dst, src); | |
337 } | |
338 } else { | 332 } else { |
339 UNREACHABLE(); | 333 ASSERT(destination->IsDoubleStackSlot()); |
| 334 Operand dst = cgen_->ToOperand(destination); |
| 335 __ movdbl(dst, src); |
340 } | 336 } |
341 } else if (source->IsDoubleStackSlot()) { | 337 } else if (source->IsDoubleStackSlot()) { |
342 if (CpuFeatures::IsSupported(SSE2)) { | 338 CpuFeatures::Scope scope(SSE2); |
343 CpuFeatures::Scope scope(SSE2); | 339 ASSERT(destination->IsDoubleRegister() || |
344 ASSERT(destination->IsDoubleRegister() || | 340 destination->IsDoubleStackSlot()); |
345 destination->IsDoubleStackSlot()); | 341 Operand src = cgen_->ToOperand(source); |
346 Operand src = cgen_->ToOperand(source); | 342 if (destination->IsDoubleRegister()) { |
347 if (destination->IsDoubleRegister()) { | 343 XMMRegister dst = cgen_->ToDoubleRegister(destination); |
348 XMMRegister dst = cgen_->ToDoubleRegister(destination); | 344 __ movdbl(dst, src); |
349 __ movdbl(dst, src); | |
350 } else { | |
351 // We rely on having xmm0 available as a fixed scratch register. | |
352 Operand dst = cgen_->ToOperand(destination); | |
353 __ movdbl(xmm0, src); | |
354 __ movdbl(dst, xmm0); | |
355 } | |
356 } else { | 345 } else { |
357 UNREACHABLE(); | 346 // We rely on having xmm0 available as a fixed scratch register. |
| 347 Operand dst = cgen_->ToOperand(destination); |
| 348 __ movdbl(xmm0, src); |
| 349 __ movdbl(dst, xmm0); |
358 } | 350 } |
359 } else { | 351 } else { |
360 UNREACHABLE(); | 352 UNREACHABLE(); |
361 } | 353 } |
362 | 354 |
363 RemoveMove(index); | 355 RemoveMove(index); |
364 } | 356 } |
365 | 357 |
366 | 358 |
367 void LGapResolver::EmitSwap(int index) { | 359 void LGapResolver::EmitSwap(int index) { |
(...skipping 54 matching lines...)
422 CpuFeatures::Scope scope(SSE2); | 414 CpuFeatures::Scope scope(SSE2); |
423 // XMM register-register swap. We rely on having xmm0 | 415 // XMM register-register swap. We rely on having xmm0 |
424 // available as a fixed scratch register. | 416 // available as a fixed scratch register. |
425 XMMRegister src = cgen_->ToDoubleRegister(source); | 417 XMMRegister src = cgen_->ToDoubleRegister(source); |
426 XMMRegister dst = cgen_->ToDoubleRegister(destination); | 418 XMMRegister dst = cgen_->ToDoubleRegister(destination); |
427 __ movaps(xmm0, src); | 419 __ movaps(xmm0, src); |
428 __ movaps(src, dst); | 420 __ movaps(src, dst); |
429 __ movaps(dst, xmm0); | 421 __ movaps(dst, xmm0); |
430 | 422 |
431 } else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) { | 423 } else if (source->IsDoubleRegister() || destination->IsDoubleRegister()) { |
| 424 CpuFeatures::Scope scope(SSE2); |
432 // XMM register-memory swap. We rely on having xmm0 | 425 // XMM register-memory swap. We rely on having xmm0 |
433 // available as a fixed scratch register. | 426 // available as a fixed scratch register. |
434 ASSERT(source->IsDoubleStackSlot() || destination->IsDoubleStackSlot()); | 427 ASSERT(source->IsDoubleStackSlot() || destination->IsDoubleStackSlot()); |
435 XMMRegister reg = cgen_->ToDoubleRegister(source->IsDoubleRegister() | 428 XMMRegister reg = cgen_->ToDoubleRegister(source->IsDoubleRegister() |
436 ? source | 429 ? source |
437 : destination); | 430 : destination); |
438 Operand other = | 431 Operand other = |
439 cgen_->ToOperand(source->IsDoubleRegister() ? destination : source); | 432 cgen_->ToOperand(source->IsDoubleRegister() ? destination : source); |
440 __ movdbl(xmm0, other); | 433 __ movdbl(xmm0, other); |
441 __ movdbl(other, reg); | 434 __ movdbl(other, reg); |
442 __ movdbl(reg, Operand(xmm0)); | 435 __ movdbl(reg, Operand(xmm0)); |
443 | 436 |
444 } else if (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot()) { | 437 } else if (source->IsDoubleStackSlot() && destination->IsDoubleStackSlot()) { |
| 438 CpuFeatures::Scope scope(SSE2); |
445 // Double-width memory-to-memory. Spill on demand to use a general | 439 // Double-width memory-to-memory. Spill on demand to use a general |
446 // purpose temporary register and also rely on having xmm0 available as | 440 // purpose temporary register and also rely on having xmm0 available as |
447 // a fixed scratch register. | 441 // a fixed scratch register. |
448 Register tmp = EnsureTempRegister(); | 442 Register tmp = EnsureTempRegister(); |
449 Operand src0 = cgen_->ToOperand(source); | 443 Operand src0 = cgen_->ToOperand(source); |
450 Operand src1 = cgen_->HighOperand(source); | 444 Operand src1 = cgen_->HighOperand(source); |
451 Operand dst0 = cgen_->ToOperand(destination); | 445 Operand dst0 = cgen_->ToOperand(destination); |
452 Operand dst1 = cgen_->HighOperand(destination); | 446 Operand dst1 = cgen_->HighOperand(destination); |
453 __ movdbl(xmm0, dst0); // Save destination in xmm0. | 447 __ movdbl(xmm0, dst0); // Save destination in xmm0. |
454 __ mov(tmp, src0); // Then use tmp to copy source to destination. | 448 __ mov(tmp, src0); // Then use tmp to copy source to destination. |
(...skipping 36 matching lines...)
491 } else if (destination->IsRegister()) { | 485 } else if (destination->IsRegister()) { |
492 source_uses_[destination->index()] = CountSourceUses(destination); | 486 source_uses_[destination->index()] = CountSourceUses(destination); |
493 } | 487 } |
494 } | 488 } |
495 | 489 |
496 #undef __ | 490 #undef __ |
497 | 491 |
498 } } // namespace v8::internal | 492 } } // namespace v8::internal |
499 | 493 |
500 #endif // V8_TARGET_ARCH_IA32 | 494 #endif // V8_TARGET_ARCH_IA32 |