Chromium Code Reviews

Side by Side Diff: src/ia32/lithium-codegen-ia32.cc

Issue 18041003: Implement X87 stack tracking and x87 multiplication (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: rebase (created 7 years, 5 months ago)
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 335 matching lines...)
346 current_instruction_, 346 current_instruction_,
347 instr->hydrogen_value()->id(), 347 instr->hydrogen_value()->id(),
348 instr->Mnemonic()); 348 instr->Mnemonic());
349 } 349 }
350 350
351 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr); 351 if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr);
352 352
353 instr->CompileToNative(this); 353 instr->CompileToNative(this);
354 354
355 if (!CpuFeatures::IsSupported(SSE2)) { 355 if (!CpuFeatures::IsSupported(SSE2)) {
356 ASSERT(!instr->HasDoubleRegisterResult() || x87_stack_depth_ == 1);
357 if (FLAG_debug_code && FLAG_enable_slow_asserts) { 356 if (FLAG_debug_code && FLAG_enable_slow_asserts) {
358 __ VerifyX87StackDepth(x87_stack_depth_); 357 __ VerifyX87StackDepth(x87_stack_depth_);
359 } 358 }
360 } 359 }
361 } 360 }
362 EnsureSpaceForLazyDeopt(); 361 EnsureSpaceForLazyDeopt();
363 return !is_aborted(); 362 return !is_aborted();
364 } 363 }
365 364
366 365
(...skipping 127 matching lines...)
494 safepoints_.Emit(masm(), GetStackSlotCount()); 493 safepoints_.Emit(masm(), GetStackSlotCount());
495 return !is_aborted(); 494 return !is_aborted();
496 } 495 }
497 496
498 497
499 Register LCodeGen::ToRegister(int index) const { 498 Register LCodeGen::ToRegister(int index) const {
500 return Register::FromAllocationIndex(index); 499 return Register::FromAllocationIndex(index);
501 } 500 }
502 501
503 502
503 X87Register LCodeGen::ToX87Register(int index) const {
504 return X87Register::FromAllocationIndex(index);
505 }
506
507
504 XMMRegister LCodeGen::ToDoubleRegister(int index) const { 508 XMMRegister LCodeGen::ToDoubleRegister(int index) const {
505 return XMMRegister::FromAllocationIndex(index); 509 return XMMRegister::FromAllocationIndex(index);
506 } 510 }
507 511
508 512
509 bool LCodeGen::IsX87TopOfStack(LOperand* op) const { 513 void LCodeGen::X87LoadForUsage(X87Register reg) {
510 return op->IsDoubleRegister(); 514 ASSERT(X87StackContains(reg));
515 X87Fxch(reg);
516 x87_stack_depth_--;
511 } 517 }
512 518
513 519
514 void LCodeGen::ReadX87Operand(Operand dst) { 520 void LCodeGen::X87Fxch(X87Register reg, int other_slot) {
515 ASSERT(x87_stack_depth_ == 1); 521 ASSERT(X87StackContains(reg) && x87_stack_depth_ > other_slot);
522 int i = X87ArrayIndex(reg);
523 int st = x87_st2idx(i);
524 if (st != other_slot) {
525 int other_i = x87_st2idx(other_slot);
526 X87Register other = x87_stack_[other_i];
527 x87_stack_[other_i] = reg;
528 x87_stack_[i] = other;
529 if (st == 0) {
530 __ fxch(other_slot);
531 } else if (other_slot == 0) {
532 __ fxch(st);
533 } else {
534 __ fxch(st);
535 __ fxch(other_slot);
536 __ fxch(st);
537 }
538 }
539 }
540
541
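
Note: the three-fxch sequence in X87Fxch above exchanges two stack slots when neither of them is st(0), routing the swap through the top of the stack so that st(0) ends up unchanged. A minimal standalone model of that effect (not V8 code; the array stands in for the physical FPU stack, and fxch/swap_slots are illustrative names):

#include <algorithm>
#include <cassert>

// Model: fpu[0] plays the role of st(0); fxch(k) swaps st(0) and st(k).
static void fxch(double* fpu, int k) { std::swap(fpu[0], fpu[k]); }

// The X87Fxch pattern for two non-top slots: fxch(st); fxch(other_slot); fxch(st).
static void swap_slots(double* fpu, int st, int other_slot) {
  fxch(fpu, st);          // bring st(st) to the top
  fxch(fpu, other_slot);  // old st(st) lands in st(other_slot), old st(other_slot) is on top
  fxch(fpu, st);          // old st(other_slot) lands in st(st); original st(0) restored
}

int main() {
  double fpu[3] = {1.0, 2.0, 3.0};  // st(0)=1, st(1)=2, st(2)=3
  swap_slots(fpu, 1, 2);
  assert(fpu[0] == 1.0 && fpu[1] == 3.0 && fpu[2] == 2.0);  // st(1)/st(2) swapped, top intact
  return 0;
}
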
542 int LCodeGen::x87_st2idx(int pos) {
543 return x87_stack_depth_ - pos - 1;
544 }
545
546
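
Note: x87_st2idx converts between a position in the bookkeeping array x87_stack_ and an FPU stack position. Index 0 holds the oldest tracked value and index depth-1 the most recent push, which is st(0); because the formula is symmetric, the same function maps st positions back to array indices. A small self-checking sketch (not V8 code; st2idx here takes the depth explicitly):

#include <cassert>

// Mirror of x87_st2idx(): array slot i <-> st(depth - i - 1), in either direction.
static int st2idx(int depth, int pos) { return depth - pos - 1; }

int main() {
  assert(st2idx(3, 0) == 2);             // st(0) is x87_stack_[2], the newest entry
  assert(st2idx(3, 2) == 0);             // st(2) is x87_stack_[0], the oldest entry
  assert(st2idx(3, st2idx(3, 1)) == 1);  // the mapping is its own inverse
  return 0;
}
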
547 int LCodeGen::X87ArrayIndex(X87Register reg) {
548 for (int i = 0; i < x87_stack_depth_; i++) {
549 if (x87_stack_[i].is(reg)) return i;
550 }
551 UNREACHABLE();
552 return -1;
553 }
554
555
556 bool LCodeGen::X87StackContains(X87Register reg) {
557 for (int i = 0; i < x87_stack_depth_; i++) {
558 if (x87_stack_[i].is(reg)) return true;
559 }
560 return false;
561 }
562
563
564 void LCodeGen::X87Free(X87Register reg) {
565 ASSERT(X87StackContains(reg));
566 int i = X87ArrayIndex(reg);
567 int st = x87_st2idx(i);
568 if (st > 0) {
569 // keep track of how fstp(i) changes the order of elements
570 int tos_i = x87_st2idx(0);
571 x87_stack_[i] = x87_stack_[tos_i];
572 }
573 x87_stack_depth_--;
574 __ fstp(st);
575 }
576
577
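
Note: X87Free relies on the x87 semantics of fstp st(i): the instruction stores st(0) into st(i) and then pops, so the value that was on top drops into the freed slot. The bookkeeping line x87_stack_[i] = x87_stack_[tos_i] mirrors exactly that ("keep track of how fstp(i) changes the order of elements"). A standalone model (not V8 code; the vector's back() plays the role of st(0)):

#include <cassert>
#include <vector>

// Model of "fstp st(i)": copy st(0) into st(i), then pop the register stack.
static void fstp(std::vector<double>* fpu, int i) {
  const int top = static_cast<int>(fpu->size()) - 1;
  (*fpu)[top - i] = (*fpu)[top];  // store st(0) into st(i)
  fpu->pop_back();                // pop st(0)
}

int main() {
  std::vector<double> fpu = {30.0, 20.0, 10.0};  // st(0)=10, st(1)=20, st(2)=30
  fstp(&fpu, 2);                                 // free st(2): the old top value lands there
  assert(fpu.size() == 2 && fpu.back() == 20.0 && fpu.front() == 10.0);
  return 0;
}
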
578 void LCodeGen::X87Mov(X87Register dst, Operand src, X87OperandType opts) {
579 if (X87StackContains(dst)) {
580 X87Fxch(dst);
581 __ fstp(0);
582 } else {
583 ASSERT(x87_stack_depth_ < X87Register::kNumAllocatableRegisters);
584 x87_stack_[x87_stack_depth_] = dst;
585 x87_stack_depth_++;
586 }
587 X87Fld(src, opts);
588 }
589
590
591 void LCodeGen::X87Fld(Operand src, X87OperandType opts) {
592 if (opts == kX87DoubleOperand) {
593 __ fld_d(src);
594 } else if (opts == kX87FloatOperand) {
595 __ fld_s(src);
596 } else if (opts == kX87IntOperand) {
597 __ fild_s(src);
598 } else {
599 UNREACHABLE();
600 }
601 }
602
603
604 void LCodeGen::X87Mov(Operand dst, X87Register src) {
605 X87Fxch(src);
516 __ fst_d(dst); 606 __ fst_d(dst);
517 } 607 }
518 608
519 609
520 void LCodeGen::PushX87DoubleOperand(Operand src) { 610 void LCodeGen::X87PrepareToWrite(X87Register reg) {
521 ASSERT(x87_stack_depth_ == 0); 611 if (X87StackContains(reg)) {
522 x87_stack_depth_++; 612 X87Free(reg);
523 __ fld_d(src); 613 }
614 // Mark this register as the next register to write to
615 x87_stack_[x87_stack_depth_] = reg;
524 } 616 }
525 617
526 618
527 void LCodeGen::PushX87FloatOperand(Operand src) { 619 void LCodeGen::X87CommitWrite(X87Register reg) {
528 ASSERT(x87_stack_depth_ == 0); 620 // Assert the reg is prepared to write, but not on the virtual stack yet
621 ASSERT(!X87StackContains(reg) && x87_stack_[x87_stack_depth_].is(reg) &&
622 x87_stack_depth_ < X87Register::kNumAllocatableRegisters);
529 x87_stack_depth_++; 623 x87_stack_depth_++;
530 __ fld_s(src);
531 } 624 }
532 625
533 626
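
Note: X87PrepareToWrite and X87CommitWrite split the definition of a result register around the macro-assembler instruction that physically pushes it (EmitNumberUntagDNoSSE2 later in this patch uses them exactly that way): the target is recorded at x87_stack_[x87_stack_depth_] first, and the depth is only bumped once the value is really on the FPU stack. A minimal standalone model of that protocol (not V8 code; it skips the "free it first if already tracked" step, and VirtualX87Model and its members are illustrative names):

#include <cassert>
#include <vector>

struct VirtualX87Model {
  std::vector<int> slots = std::vector<int>(8, -1);  // stands in for x87_stack_
  int depth = 0;                                     // stands in for x87_stack_depth_

  void PrepareToWrite(int reg) { slots[depth] = reg; }  // reserve the slot, depth unchanged
  void CommitWrite(int reg) {
    assert(slots[depth] == reg);  // the same invariant the real ASSERT checks
    depth++;                      // the value is now physically at st(0)
  }
};

int main() {
  VirtualX87Model x87;
  x87.PrepareToWrite(3);
  // ... the macro assembler would emit fld/fild here, pushing the value ...
  x87.CommitWrite(3);
  assert(x87.depth == 1 && x87.slots[0] == 3);
  return 0;
}
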
534 void LCodeGen::PopX87() { 627 void LCodeGen::X87PrepareBinaryOp(
535 ASSERT(x87_stack_depth_ == 1); 628 X87Register left, X87Register right, X87Register result) {
536 x87_stack_depth_--; 629 // You need to use DefineSameAsFirst for x87 instructions
537 __ fstp(0); 630 ASSERT(result.is(left));
631 X87Fxch(right, 1);
632 X87Fxch(left);
538 } 633 }
539 634
540 635
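
Note: X87PrepareBinaryOp arranges the operands for the two-operand x87 arithmetic in DoArithmeticD's new non-SSE2 path (where only Token::MUL is handled so far): right is exchanged into st(1), then left, which DefineSameAsFirst ties to the result, into st(0). A standalone model of the resulting layout (not V8 code; bring_to_slot stands in for X87Fxch and the d0/d1/d2 names are placeholders):

#include <cassert>
#include <string>
#include <utility>
#include <vector>

// regs.back() plays the role of st(0); slot k is index top - k.
static void bring_to_slot(std::vector<std::string>* regs, const std::string& r, int slot) {
  const int top = static_cast<int>(regs->size()) - 1;
  for (int i = top; i >= 0; i--) {
    if ((*regs)[i] == r) {
      std::swap((*regs)[i], (*regs)[top - slot]);  // exchange the two slots, like X87Fxch
      return;
    }
  }
}

int main() {
  std::vector<std::string> regs = {"d0", "d2", "d1"};  // d1 is currently st(0)
  bring_to_slot(&regs, "d0", 1);  // X87Fxch(right, 1): right -> st(1)
  bring_to_slot(&regs, "d2", 0);  // X87Fxch(left):     left  -> st(0)
  assert(regs.back() == "d2");            // left/result at st(0)
  assert(regs[regs.size() - 2] == "d0");  // right at st(1)
  return 0;
}
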
541 void LCodeGen::CurrentInstructionReturnsX87Result() { 636 void LCodeGen::FlushX87StackIfNecessary(LInstruction* instr) {
542 ASSERT(x87_stack_depth_ <= 1); 637 if (x87_stack_depth_ > 0 && instr->ClobbersDoubleRegisters()) {
543 if (x87_stack_depth_ == 0) { 638 bool double_inputs = instr->HasDoubleRegisterInput();
544 x87_stack_depth_ = 1; 639
640 // Flush stack from tos down, since FreeX87() will mess with tos
641 for (int i = x87_stack_depth_-1; i >= 0; i--) {
642 X87Register reg = x87_stack_[i];
643 // Skip registers which contain the inputs for the next instruction
644 // when flushing the stack
645 if (double_inputs && instr->IsDoubleInput(reg, this)) {
646 continue;
647 }
648 X87Free(reg);
649 if (i < x87_stack_depth_-1) i++;
650 }
651 }
652 if (instr->IsReturn()) {
653 while (x87_stack_depth_ > 0) {
654 __ fstp(0);
655 x87_stack_depth_--;
656 }
545 } 657 }
546 } 658 }
547 659
548 660
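
Note: the flush loop above walks the virtual stack from the top down and frees every slot that is not an input of the current instruction. Because X87Free drops the register that was at the array's tail into the freed slot i, the loop bumps i back up afterwards (unless the freed slot was the tail itself) so the moved entry gets examined too. A standalone model of that loop, with the re-check path exercised (not V8 code; free_slot and flush_except are illustrative names):

#include <cassert>
#include <set>
#include <vector>

// Mirrors X87Free's bookkeeping: the tail entry (the tracked TOS) drops into
// slot i and the depth shrinks by one (self-assignment when i is the tail,
// which the real code guards with st > 0).
static void free_slot(std::vector<int>* stack, int i) {
  (*stack)[i] = stack->back();
  stack->pop_back();
}

// Mirrors FlushX87StackIfNecessary's loop, keeping only the registers in |keep|.
static void flush_except(std::vector<int>* stack, const std::set<int>& keep) {
  for (int i = static_cast<int>(stack->size()) - 1; i >= 0; i--) {
    if (keep.count((*stack)[i])) continue;
    free_slot(stack, i);
    if (i < static_cast<int>(stack->size()) - 1) i++;  // re-examine the slot that just changed
  }
}

int main() {
  std::vector<int> stack = {7, 3, 9, 5};  // tracked register ids; 5 is on top
  flush_except(&stack, std::set<int>{9, 5});
  assert(stack.size() == 2 && stack[0] == 9 && stack[1] == 5);
  return 0;
}
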
549 void LCodeGen::FlushX87StackIfNecessary(LInstruction* instr) { 661 void LCodeGen::EmitFlushX87ForDeopt() {
550 if (x87_stack_depth_ > 0) { 662 for (int i = 0; i < x87_stack_depth_; i++) __ fstp(0);
551 if ((instr->ClobbersDoubleRegisters() ||
552 instr->HasDoubleRegisterResult()) &&
553 !instr->HasDoubleRegisterInput()) {
554 PopX87();
555 }
556 }
557 } 663 }
558 664
559 665
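
Note: EmitFlushX87ForDeopt pops every tracked slot with fstp(0) but deliberately leaves x87_stack_depth_ alone. As the comment added in DeoptimizeIf further down explains, the sequence sits behind a conditional jump and only runs when the deopt is actually taken, so the fall-through path must still see both the values and the bookkeeping intact. A small sketch of that invariant (not V8 code; all names are illustrative):

#include <cassert>

struct X87State {
  int virtual_depth;   // x87_stack_depth_ in the code generator (compile-time bookkeeping)
  int physical_depth;  // what is actually on the FPU stack at run time
};

// What the emitted fstp(0) loop does, on the taken-deopt path only.
static void flush_for_deopt(X87State* s) {
  for (int i = 0; i < s->virtual_depth; i++) s->physical_depth--;
}

int main() {
  X87State taken = {2, 2};
  X87State fall_through = {2, 2};
  flush_for_deopt(&taken);
  assert(taken.physical_depth == 0);         // FPU stack empty before jumping to the deopt entry
  assert(taken.virtual_depth == 2);          // bookkeeping untouched: it also describes
  assert(fall_through.physical_depth == 2);  // the fall-through path, which keeps its values
  return 0;
}
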
560 Register LCodeGen::ToRegister(LOperand* op) const { 666 Register LCodeGen::ToRegister(LOperand* op) const {
561 ASSERT(op->IsRegister()); 667 ASSERT(op->IsRegister());
562 return ToRegister(op->index()); 668 return ToRegister(op->index());
563 } 669 }
564 670
565 671
672 X87Register LCodeGen::ToX87Register(LOperand* op) const {
673 ASSERT(op->IsDoubleRegister());
674 return ToX87Register(op->index());
675 }
676
677
566 XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const { 678 XMMRegister LCodeGen::ToDoubleRegister(LOperand* op) const {
567 ASSERT(op->IsDoubleRegister()); 679 ASSERT(op->IsDoubleRegister());
568 return ToDoubleRegister(op->index()); 680 return ToDoubleRegister(op->index());
569 } 681 }
570 682
571 683
572 int LCodeGen::ToInteger32(LConstantOperand* op) const { 684 int LCodeGen::ToInteger32(LConstantOperand* op) const {
573 HConstant* constant = chunk_->LookupConstant(op); 685 HConstant* constant = chunk_->LookupConstant(op);
574 return constant->Integer32Value(); 686 return constant->Integer32Value();
575 } 687 }
(...skipping 252 matching lines...)
828 deoptimizations_.Add(environment, zone()); 940 deoptimizations_.Add(environment, zone());
829 } 941 }
830 } 942 }
831 943
832 944
833 void LCodeGen::DeoptimizeIf(Condition cc, 945 void LCodeGen::DeoptimizeIf(Condition cc,
834 LEnvironment* environment, 946 LEnvironment* environment,
835 Deoptimizer::BailoutType bailout_type) { 947 Deoptimizer::BailoutType bailout_type) {
836 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); 948 RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
837 ASSERT(environment->HasBeenRegistered()); 949 ASSERT(environment->HasBeenRegistered());
838 // It's an error to deoptimize with the x87 fp stack in use.
839 ASSERT(x87_stack_depth_ == 0);
840 int id = environment->deoptimization_index(); 950 int id = environment->deoptimization_index();
841 ASSERT(info()->IsOptimizing() || info()->IsStub()); 951 ASSERT(info()->IsOptimizing() || info()->IsStub());
842 Address entry = 952 Address entry =
843 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type); 953 Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
844 if (entry == NULL) { 954 if (entry == NULL) {
845 Abort("bailout was not prepared"); 955 Abort("bailout was not prepared");
846 return; 956 return;
847 } 957 }
848 958
849 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) { 959 if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
(...skipping 17 matching lines...)
867 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); 977 __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
868 978
869 __ bind(&no_deopt); 979 __ bind(&no_deopt);
870 __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset), 980 __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset),
871 eax); 981 eax);
872 __ pop(ebx); 982 __ pop(ebx);
873 __ pop(eax); 983 __ pop(eax);
874 __ popfd(); 984 __ popfd();
875 } 985 }
876 986
987 // Before Instructions which can deopt, we normally flush the x87 stack. But
988 // we can have inputs or outputs of the current instruction on the stack,
989 // thus we need to flush them here from the physical stack to leave it in a
990 // consistent state.
991 if (x87_stack_depth_ > 0) {
992 Label done;
993 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear);
994 EmitFlushX87ForDeopt();
995 __ bind(&done);
996 }
997
877 if (FLAG_trap_on_deopt && info()->IsOptimizing()) { 998 if (FLAG_trap_on_deopt && info()->IsOptimizing()) {
878 Label done; 999 Label done;
879 if (cc != no_condition) { 1000 if (cc != no_condition) __ j(NegateCondition(cc), &done, Label::kNear);
880 __ j(NegateCondition(cc), &done, Label::kNear);
881 }
882 __ int3(); 1001 __ int3();
883 __ bind(&done); 1002 __ bind(&done);
884 } 1003 }
885 1004
886 ASSERT(info()->IsStub() || frame_is_built_); 1005 ASSERT(info()->IsStub() || frame_is_built_);
887 if (cc == no_condition && frame_is_built_) { 1006 if (cc == no_condition && frame_is_built_) {
888 if (bailout_type == Deoptimizer::LAZY) { 1007 if (bailout_type == Deoptimizer::LAZY) {
889 __ call(entry, RelocInfo::RUNTIME_ENTRY); 1008 __ call(entry, RelocInfo::RUNTIME_ENTRY);
890 } else { 1009 } else {
891 __ jmp(entry, RelocInfo::RUNTIME_ENTRY); 1010 __ jmp(entry, RelocInfo::RUNTIME_ENTRY);
(...skipping 822 matching lines...)
1714 } 1833 }
1715 1834
1716 1835
1717 void LCodeGen::DoConstantD(LConstantD* instr) { 1836 void LCodeGen::DoConstantD(LConstantD* instr) {
1718 double v = instr->value(); 1837 double v = instr->value();
1719 uint64_t int_val = BitCast<uint64_t, double>(v); 1838 uint64_t int_val = BitCast<uint64_t, double>(v);
1720 int32_t lower = static_cast<int32_t>(int_val); 1839 int32_t lower = static_cast<int32_t>(int_val);
1721 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt)); 1840 int32_t upper = static_cast<int32_t>(int_val >> (kBitsPerInt));
1722 1841
1723 if (!CpuFeatures::IsSafeForSnapshot(SSE2)) { 1842 if (!CpuFeatures::IsSafeForSnapshot(SSE2)) {
1843 __ push(Immediate(upper));
1724 __ push(Immediate(lower)); 1844 __ push(Immediate(lower));
1725 __ push(Immediate(upper)); 1845 X87Mov(ToX87Register(instr->result()), Operand(esp, 0));
1726 PushX87DoubleOperand(Operand(esp, 0));
1727 __ add(Operand(esp), Immediate(kDoubleSize)); 1846 __ add(Operand(esp), Immediate(kDoubleSize));
1728 CurrentInstructionReturnsX87Result();
1729 } else { 1847 } else {
1730 CpuFeatureScope scope1(masm(), SSE2); 1848 CpuFeatureScope scope1(masm(), SSE2);
1731 ASSERT(instr->result()->IsDoubleRegister()); 1849 ASSERT(instr->result()->IsDoubleRegister());
1732 XMMRegister res = ToDoubleRegister(instr->result()); 1850 XMMRegister res = ToDoubleRegister(instr->result());
1733 if (int_val == 0) { 1851 if (int_val == 0) {
1734 __ xorps(res, res); 1852 __ xorps(res, res);
1735 } else { 1853 } else {
1736 Register temp = ToRegister(instr->temp()); 1854 Register temp = ToRegister(instr->temp());
1737 if (CpuFeatures::IsSupported(SSE4_1)) { 1855 if (CpuFeatures::IsSupported(SSE4_1)) {
1738 CpuFeatureScope scope2(masm(), SSE4_1); 1856 CpuFeatureScope scope2(masm(), SSE4_1);
(...skipping 244 matching lines...)
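
Note: the DoConstantD hunk above also fixes the push order on the non-SSE2 path: the upper half of the double is pushed first and the lower half second, so the low word ends up at the lower address and Operand(esp, 0) sees a well-formed little-endian IEEE-754 double for X87Mov to load. A standalone check of that layout (not V8 code; assumes a little-endian host, as ia32 is):

#include <cassert>
#include <cstdint>
#include <cstring>

int main() {
  const double v = 1.5;
  uint64_t bits;
  std::memcpy(&bits, &v, sizeof bits);
  const uint32_t lower = static_cast<uint32_t>(bits);        // low 32 bits
  const uint32_t upper = static_cast<uint32_t>(bits >> 32);  // high 32 bits (kBitsPerInt == 32)

  // The stack grows downwards, so push(upper) followed by push(lower) leaves
  // |lower| at the lower address: index 0 of this simulated stack ("esp").
  const uint32_t simulated_stack[2] = {lower, upper};

  double reloaded;
  std::memcpy(&reloaded, simulated_stack, sizeof reloaded);  // what fld_d(Operand(esp, 0)) reads
  assert(reloaded == v);
  return 0;
}
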
1983 __ j(parity_even, &return_left, Label::kNear); // left == NaN. 2101 __ j(parity_even, &return_left, Label::kNear); // left == NaN.
1984 __ bind(&return_right); 2102 __ bind(&return_right);
1985 __ movsd(left_reg, right_reg); 2103 __ movsd(left_reg, right_reg);
1986 2104
1987 __ bind(&return_left); 2105 __ bind(&return_left);
1988 } 2106 }
1989 } 2107 }
1990 2108
1991 2109
1992 void LCodeGen::DoArithmeticD(LArithmeticD* instr) { 2110 void LCodeGen::DoArithmeticD(LArithmeticD* instr) {
1993 CpuFeatureScope scope(masm(), SSE2); 2111 if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
1994 XMMRegister left = ToDoubleRegister(instr->left()); 2112 CpuFeatureScope scope(masm(), SSE2);
1995 XMMRegister right = ToDoubleRegister(instr->right()); 2113 XMMRegister left = ToDoubleRegister(instr->left());
1996 XMMRegister result = ToDoubleRegister(instr->result()); 2114 XMMRegister right = ToDoubleRegister(instr->right());
1997 // Modulo uses a fixed result register. 2115 XMMRegister result = ToDoubleRegister(instr->result());
1998 ASSERT(instr->op() == Token::MOD || left.is(result)); 2116 // Modulo uses a fixed result register.
1999 switch (instr->op()) { 2117 ASSERT(instr->op() == Token::MOD || left.is(result));
2000 case Token::ADD: 2118 switch (instr->op()) {
2001 __ addsd(left, right); 2119 case Token::ADD:
2002 break; 2120 __ addsd(left, right);
2003 case Token::SUB: 2121 break;
2004 __ subsd(left, right); 2122 case Token::SUB:
2005 break; 2123 __ subsd(left, right);
2006 case Token::MUL: 2124 break;
2007 __ mulsd(left, right); 2125 case Token::MUL:
2008 break; 2126 __ mulsd(left, right);
2009 case Token::DIV: 2127 break;
2010 __ divsd(left, right); 2128 case Token::DIV:
2011 // Don't delete this mov. It may improve performance on some CPUs, 2129 __ divsd(left, right);
2012 // when there is a mulsd depending on the result 2130 // Don't delete this mov. It may improve performance on some CPUs,
2013 __ movaps(left, left); 2131 // when there is a mulsd depending on the result
2014 break; 2132 __ movaps(left, left);
2015 case Token::MOD: { 2133 break;
2016 // Pass two doubles as arguments on the stack. 2134 case Token::MOD: {
2017 __ PrepareCallCFunction(4, eax); 2135 // Pass two doubles as arguments on the stack.
2018 __ movdbl(Operand(esp, 0 * kDoubleSize), left); 2136 __ PrepareCallCFunction(4, eax);
2019 __ movdbl(Operand(esp, 1 * kDoubleSize), right); 2137 __ movdbl(Operand(esp, 0 * kDoubleSize), left);
2020 __ CallCFunction( 2138 __ movdbl(Operand(esp, 1 * kDoubleSize), right);
2021 ExternalReference::double_fp_operation(Token::MOD, isolate()), 2139 __ CallCFunction(
2022 4); 2140 ExternalReference::double_fp_operation(Token::MOD, isolate()),
2141 4);
2023 2142
2024 // Return value is in st(0) on ia32. 2143 // Return value is in st(0) on ia32.
2025 // Store it into the (fixed) result register. 2144 // Store it into the (fixed) result register.
2026 __ sub(Operand(esp), Immediate(kDoubleSize)); 2145 __ sub(Operand(esp), Immediate(kDoubleSize));
2027 __ fstp_d(Operand(esp, 0)); 2146 __ fstp_d(Operand(esp, 0));
2028 __ movdbl(result, Operand(esp, 0)); 2147 __ movdbl(result, Operand(esp, 0));
2029 __ add(Operand(esp), Immediate(kDoubleSize)); 2148 __ add(Operand(esp), Immediate(kDoubleSize));
2030 break; 2149 break;
2150 }
2151 default:
2152 UNREACHABLE();
2153 break;
2031 } 2154 }
2032 default: 2155 } else {
2033 UNREACHABLE(); 2156 X87Register left = ToX87Register(instr->left());
2034 break; 2157 X87Register right = ToX87Register(instr->right());
2158 X87Register result = ToX87Register(instr->result());
2159 X87PrepareBinaryOp(left, right, result);
2160 switch (instr->op()) {
2161 case Token::MUL:
2162 __ fmul_i(1);
2163 break;
2164 default:
2165 UNREACHABLE();
2166 break;
2167 }
2035 } 2168 }
2036 } 2169 }
2037 2170
2038 2171
2039 void LCodeGen::DoNegateNoSSE2D(LNegateNoSSE2D* instr) {
2040 __ push(Immediate(-1));
2041 __ fild_s(Operand(esp, 0));
2042 __ add(esp, Immediate(kPointerSize));
2043 __ fmulp();
2044 CurrentInstructionReturnsX87Result();
2045 }
2046
2047
2048
2049 void LCodeGen::DoArithmeticT(LArithmeticT* instr) { 2172 void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
2050 ASSERT(ToRegister(instr->context()).is(esi)); 2173 ASSERT(ToRegister(instr->context()).is(esi));
2051 ASSERT(ToRegister(instr->left()).is(edx)); 2174 ASSERT(ToRegister(instr->left()).is(edx));
2052 ASSERT(ToRegister(instr->right()).is(eax)); 2175 ASSERT(ToRegister(instr->right()).is(eax));
2053 ASSERT(ToRegister(instr->result()).is(eax)); 2176 ASSERT(ToRegister(instr->result()).is(eax));
2054 2177
2055 BinaryOpStub stub(instr->op(), NO_OVERWRITE); 2178 BinaryOpStub stub(instr->op(), NO_OVERWRITE);
2056 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr); 2179 CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
2057 __ nop(); // Signals no inlined code. 2180 __ nop(); // Signals no inlined code.
2058 } 2181 }
(...skipping 897 matching lines...)
2956 HObjectAccess access = instr->hydrogen()->access(); 3079 HObjectAccess access = instr->hydrogen()->access();
2957 int offset = access.offset(); 3080 int offset = access.offset();
2958 Register object = ToRegister(instr->object()); 3081 Register object = ToRegister(instr->object());
2959 if (FLAG_track_double_fields && 3082 if (FLAG_track_double_fields &&
2960 instr->hydrogen()->representation().IsDouble()) { 3083 instr->hydrogen()->representation().IsDouble()) {
2961 if (CpuFeatures::IsSupported(SSE2)) { 3084 if (CpuFeatures::IsSupported(SSE2)) {
2962 CpuFeatureScope scope(masm(), SSE2); 3085 CpuFeatureScope scope(masm(), SSE2);
2963 XMMRegister result = ToDoubleRegister(instr->result()); 3086 XMMRegister result = ToDoubleRegister(instr->result());
2964 __ movdbl(result, FieldOperand(object, offset)); 3087 __ movdbl(result, FieldOperand(object, offset));
2965 } else { 3088 } else {
2966 PushX87DoubleOperand(FieldOperand(object, offset)); 3089 X87Mov(ToX87Register(instr->result()), FieldOperand(object, offset));
2967 CurrentInstructionReturnsX87Result();
2968 } 3090 }
2969 return; 3091 return;
2970 } 3092 }
2971 3093
2972 Register result = ToRegister(instr->result()); 3094 Register result = ToRegister(instr->result());
2973 if (access.IsInobject()) { 3095 if (access.IsInobject()) {
2974 __ mov(result, FieldOperand(object, offset)); 3096 __ mov(result, FieldOperand(object, offset));
2975 } else { 3097 } else {
2976 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset)); 3098 __ mov(result, FieldOperand(object, JSObject::kPropertiesOffset));
2977 __ mov(result, FieldOperand(result, offset)); 3099 __ mov(result, FieldOperand(result, offset));
(...skipping 224 matching lines...)
3202 elements_kind, 3324 elements_kind,
3203 0, 3325 0,
3204 instr->additional_index())); 3326 instr->additional_index()));
3205 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) { 3327 if (elements_kind == EXTERNAL_FLOAT_ELEMENTS) {
3206 if (CpuFeatures::IsSupported(SSE2)) { 3328 if (CpuFeatures::IsSupported(SSE2)) {
3207 CpuFeatureScope scope(masm(), SSE2); 3329 CpuFeatureScope scope(masm(), SSE2);
3208 XMMRegister result(ToDoubleRegister(instr->result())); 3330 XMMRegister result(ToDoubleRegister(instr->result()));
3209 __ movss(result, operand); 3331 __ movss(result, operand);
3210 __ cvtss2sd(result, result); 3332 __ cvtss2sd(result, result);
3211 } else { 3333 } else {
3212 PushX87FloatOperand(operand); 3334 X87Mov(ToX87Register(instr->result()), operand, kX87FloatOperand);
3213 CurrentInstructionReturnsX87Result();
3214 } 3335 }
3215 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { 3336 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
3216 if (CpuFeatures::IsSupported(SSE2)) { 3337 if (CpuFeatures::IsSupported(SSE2)) {
3217 CpuFeatureScope scope(masm(), SSE2); 3338 CpuFeatureScope scope(masm(), SSE2);
3218 __ movdbl(ToDoubleRegister(instr->result()), operand); 3339 __ movdbl(ToDoubleRegister(instr->result()), operand);
3219 } else { 3340 } else {
3220 PushX87DoubleOperand(operand); 3341 X87Mov(ToX87Register(instr->result()), operand);
3221 CurrentInstructionReturnsX87Result();
3222 } 3342 }
3223 } else { 3343 } else {
3224 Register result(ToRegister(instr->result())); 3344 Register result(ToRegister(instr->result()));
3225 switch (elements_kind) { 3345 switch (elements_kind) {
3226 case EXTERNAL_BYTE_ELEMENTS: 3346 case EXTERNAL_BYTE_ELEMENTS:
3227 __ movsx_b(result, operand); 3347 __ movsx_b(result, operand);
3228 break; 3348 break;
3229 case EXTERNAL_PIXEL_ELEMENTS: 3349 case EXTERNAL_PIXEL_ELEMENTS:
3230 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 3350 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
3231 __ movzx_b(result, operand); 3351 __ movzx_b(result, operand);
(...skipping 50 matching lines...)
3282 instr->key(), 3402 instr->key(),
3283 instr->hydrogen()->key()->representation(), 3403 instr->hydrogen()->key()->representation(),
3284 FAST_DOUBLE_ELEMENTS, 3404 FAST_DOUBLE_ELEMENTS,
3285 FixedDoubleArray::kHeaderSize - kHeapObjectTag, 3405 FixedDoubleArray::kHeaderSize - kHeapObjectTag,
3286 instr->additional_index()); 3406 instr->additional_index());
3287 if (CpuFeatures::IsSupported(SSE2)) { 3407 if (CpuFeatures::IsSupported(SSE2)) {
3288 CpuFeatureScope scope(masm(), SSE2); 3408 CpuFeatureScope scope(masm(), SSE2);
3289 XMMRegister result = ToDoubleRegister(instr->result()); 3409 XMMRegister result = ToDoubleRegister(instr->result());
3290 __ movdbl(result, double_load_operand); 3410 __ movdbl(result, double_load_operand);
3291 } else { 3411 } else {
3292 PushX87DoubleOperand(double_load_operand); 3412 X87Mov(ToX87Register(instr->result()), double_load_operand);
3293 CurrentInstructionReturnsX87Result();
3294 } 3413 }
3295 } 3414 }
3296 3415
3297 3416
3298 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) { 3417 void LCodeGen::DoLoadKeyedFixedArray(LLoadKeyed* instr) {
3299 Register result = ToRegister(instr->result()); 3418 Register result = ToRegister(instr->result());
3300 3419
3301 // Load the result. 3420 // Load the result.
3302 __ mov(result, 3421 __ mov(result,
3303 BuildFastArrayOperand(instr->elements(), 3422 BuildFastArrayOperand(instr->elements(),
(...skipping 973 matching lines...)
4277 } 4396 }
4278 } else if (FLAG_track_double_fields && representation.IsDouble()) { 4397 } else if (FLAG_track_double_fields && representation.IsDouble()) {
4279 ASSERT(transition.is_null()); 4398 ASSERT(transition.is_null());
4280 ASSERT(access.IsInobject()); 4399 ASSERT(access.IsInobject());
4281 ASSERT(!instr->hydrogen()->NeedsWriteBarrier()); 4400 ASSERT(!instr->hydrogen()->NeedsWriteBarrier());
4282 if (CpuFeatures::IsSupported(SSE2)) { 4401 if (CpuFeatures::IsSupported(SSE2)) {
4283 CpuFeatureScope scope(masm(), SSE2); 4402 CpuFeatureScope scope(masm(), SSE2);
4284 XMMRegister value = ToDoubleRegister(instr->value()); 4403 XMMRegister value = ToDoubleRegister(instr->value());
4285 __ movdbl(FieldOperand(object, offset), value); 4404 __ movdbl(FieldOperand(object, offset), value);
4286 } else { 4405 } else {
4287 __ fstp_d(FieldOperand(object, offset)); 4406 X87Register value = ToX87Register(instr->value());
4407 X87Mov(FieldOperand(object, offset), value);
4288 } 4408 }
4289 return; 4409 return;
4290 } 4410 }
4291 4411
4292 if (!transition.is_null()) { 4412 if (!transition.is_null()) {
4293 if (!instr->hydrogen()->NeedsWriteBarrierForMap()) { 4413 if (!instr->hydrogen()->NeedsWriteBarrierForMap()) {
4294 __ mov(FieldOperand(object, HeapObject::kMapOffset), transition); 4414 __ mov(FieldOperand(object, HeapObject::kMapOffset), transition);
4295 } else { 4415 } else {
4296 Register temp = ToRegister(instr->temp()); 4416 Register temp = ToRegister(instr->temp());
4297 Register temp_map = ToRegister(instr->temp_map()); 4417 Register temp_map = ToRegister(instr->temp_map());
(...skipping 105 matching lines...)
4403 __ movss(operand, xmm0); 4523 __ movss(operand, xmm0);
4404 } else { 4524 } else {
4405 __ fld(0); 4525 __ fld(0);
4406 __ fstp_s(operand); 4526 __ fstp_s(operand);
4407 } 4527 }
4408 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) { 4528 } else if (elements_kind == EXTERNAL_DOUBLE_ELEMENTS) {
4409 if (CpuFeatures::IsSafeForSnapshot(SSE2)) { 4529 if (CpuFeatures::IsSafeForSnapshot(SSE2)) {
4410 CpuFeatureScope scope(masm(), SSE2); 4530 CpuFeatureScope scope(masm(), SSE2);
4411 __ movdbl(operand, ToDoubleRegister(instr->value())); 4531 __ movdbl(operand, ToDoubleRegister(instr->value()));
4412 } else { 4532 } else {
4413 __ fst_d(operand); 4533 X87Mov(operand, ToX87Register(instr->value()));
4414 } 4534 }
4415 } else { 4535 } else {
4416 Register value = ToRegister(instr->value()); 4536 Register value = ToRegister(instr->value());
4417 switch (elements_kind) { 4537 switch (elements_kind) {
4418 case EXTERNAL_PIXEL_ELEMENTS: 4538 case EXTERNAL_PIXEL_ELEMENTS:
4419 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS: 4539 case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
4420 case EXTERNAL_BYTE_ELEMENTS: 4540 case EXTERNAL_BYTE_ELEMENTS:
4421 __ mov_b(operand, value); 4541 __ mov_b(operand, value);
4422 break; 4542 break;
4423 case EXTERNAL_SHORT_ELEMENTS: 4543 case EXTERNAL_SHORT_ELEMENTS:
(...skipping 61 matching lines...)
4485 Operand double_store_operand2 = BuildFastArrayOperand( 4605 Operand double_store_operand2 = BuildFastArrayOperand(
4486 instr->elements(), 4606 instr->elements(),
4487 instr->key(), 4607 instr->key(),
4488 instr->hydrogen()->key()->representation(), 4608 instr->hydrogen()->key()->representation(),
4489 FAST_DOUBLE_ELEMENTS, 4609 FAST_DOUBLE_ELEMENTS,
4490 FixedDoubleArray::kHeaderSize - kHeapObjectTag + kPointerSize, 4610 FixedDoubleArray::kHeaderSize - kHeapObjectTag + kPointerSize,
4491 instr->additional_index()); 4611 instr->additional_index());
4492 __ mov(double_store_operand2, Immediate(upper)); 4612 __ mov(double_store_operand2, Immediate(upper));
4493 } else { 4613 } else {
4494 Label no_special_nan_handling; 4614 Label no_special_nan_handling;
4495 ASSERT(x87_stack_depth_ > 0); 4615 X87Register value = ToX87Register(instr->value());
4616 X87Fxch(value);
4496 4617
4497 if (instr->NeedsCanonicalization()) { 4618 if (instr->NeedsCanonicalization()) {
4498 __ fld(0); 4619 __ fld(0);
4499 __ fld(0); 4620 __ fld(0);
4500 __ FCmp(); 4621 __ FCmp();
4501 4622
4502 __ j(parity_odd, &no_special_nan_handling); 4623 __ j(parity_odd, &no_special_nan_handling);
4503 __ sub(esp, Immediate(kDoubleSize)); 4624 __ sub(esp, Immediate(kDoubleSize));
4504 __ fst_d(MemOperand(esp, 0)); 4625 __ fst_d(MemOperand(esp, 0));
4505 __ cmp(MemOperand(esp, sizeof(kHoleNanLower32)), 4626 __ cmp(MemOperand(esp, sizeof(kHoleNanLower32)),
(...skipping 449 matching lines...)
4955 5076
4956 Register reg = ToRegister(instr->result()); 5077 Register reg = ToRegister(instr->result());
4957 5078
4958 bool convert_hole = false; 5079 bool convert_hole = false;
4959 HValue* change_input = instr->hydrogen()->value(); 5080 HValue* change_input = instr->hydrogen()->value();
4960 if (change_input->IsLoadKeyed()) { 5081 if (change_input->IsLoadKeyed()) {
4961 HLoadKeyed* load = HLoadKeyed::cast(change_input); 5082 HLoadKeyed* load = HLoadKeyed::cast(change_input);
4962 convert_hole = load->UsesMustHandleHole(); 5083 convert_hole = load->UsesMustHandleHole();
4963 } 5084 }
4964 5085
5086 bool use_sse2 = CpuFeatures::IsSupported(SSE2);
5087 if (!use_sse2) {
5088 // Put the value to the top of stack
5089 X87Register src = ToX87Register(instr->value());
5090 X87LoadForUsage(src);
5091 }
5092
4965 Label no_special_nan_handling; 5093 Label no_special_nan_handling;
4966 Label done; 5094 Label done;
4967 if (convert_hole) { 5095 if (convert_hole) {
4968 bool use_sse2 = CpuFeatures::IsSupported(SSE2);
4969 if (use_sse2) { 5096 if (use_sse2) {
4970 CpuFeatureScope scope(masm(), SSE2); 5097 CpuFeatureScope scope(masm(), SSE2);
4971 XMMRegister input_reg = ToDoubleRegister(instr->value()); 5098 XMMRegister input_reg = ToDoubleRegister(instr->value());
4972 __ ucomisd(input_reg, input_reg); 5099 __ ucomisd(input_reg, input_reg);
4973 } else { 5100 } else {
4974 __ fld(0); 5101 __ fld(0);
4975 __ fld(0);
4976 __ FCmp(); 5102 __ FCmp();
4977 } 5103 }
4978 5104
4979 __ j(parity_odd, &no_special_nan_handling); 5105 __ j(parity_odd, &no_special_nan_handling);
4980 __ sub(esp, Immediate(kDoubleSize)); 5106 __ sub(esp, Immediate(kDoubleSize));
4981 if (use_sse2) { 5107 if (use_sse2) {
4982 CpuFeatureScope scope(masm(), SSE2); 5108 CpuFeatureScope scope(masm(), SSE2);
4983 XMMRegister input_reg = ToDoubleRegister(instr->value()); 5109 XMMRegister input_reg = ToDoubleRegister(instr->value());
4984 __ movdbl(MemOperand(esp, 0), input_reg); 5110 __ movdbl(MemOperand(esp, 0), input_reg);
4985 } else { 5111 } else {
(...skipping 33 matching lines...)
5019 __ jmp(deferred->entry()); 5145 __ jmp(deferred->entry());
5020 } 5146 }
5021 __ bind(deferred->exit()); 5147 __ bind(deferred->exit());
5022 if (CpuFeatures::IsSupported(SSE2)) { 5148 if (CpuFeatures::IsSupported(SSE2)) {
5023 CpuFeatureScope scope(masm(), SSE2); 5149 CpuFeatureScope scope(masm(), SSE2);
5024 XMMRegister input_reg = ToDoubleRegister(instr->value()); 5150 XMMRegister input_reg = ToDoubleRegister(instr->value());
5025 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg); 5151 __ movdbl(FieldOperand(reg, HeapNumber::kValueOffset), input_reg);
5026 } else { 5152 } else {
5027 __ fst_d(FieldOperand(reg, HeapNumber::kValueOffset)); 5153 __ fst_d(FieldOperand(reg, HeapNumber::kValueOffset));
5028 } 5154 }
5155 if (!use_sse2) {
5156 // clean up the stack
5157 __ fstp(0);
5158 }
5029 __ bind(&done); 5159 __ bind(&done);
5030 } 5160 }
5031 5161
5032 5162
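
Note: the DoNumberTagD change above leans on the X87LoadForUsage contract: the register's value is brought to st(0) and the register is dropped from the virtual stack, so the caller consumes the value from the physical stack and pops it itself, which is why the function ends with the explicit fstp(0) "clean up the stack" rather than an X87Free. A standalone sketch of that contract (not V8 code; names are illustrative):

#include <cassert>
#include <utility>
#include <vector>

struct X87Model {
  std::vector<double> physical;  // physical.back() plays the role of st(0)
  std::vector<int> tracked;      // mirrors x87_stack_, indexed the same way as |physical|

  void LoadForUsage(int reg) {
    for (size_t i = 0; i < tracked.size(); i++) {
      if (tracked[i] == reg) {
        std::swap(physical[i], physical.back());  // X87Fxch(reg)
        std::swap(tracked[i], tracked.back());
        tracked.pop_back();                       // x87_stack_depth_--
        return;
      }
    }
  }
};

int main() {
  X87Model m;
  m.physical = {2.5, 7.0};  // st(0) = 7.0, st(1) = 2.5
  m.tracked = {4, 1};       // register 4 owns 2.5, register 1 owns 7.0
  m.LoadForUsage(4);
  assert(m.physical.back() == 2.5);                // register 4's value at st(0), no longer tracked
  assert(m.tracked.size() == 1 && m.tracked[0] == 1);
  m.physical.pop_back();                           // the caller's fstp(0), after fst_d stored it
  assert(m.physical.size() == m.tracked.size());   // virtual and physical depth agree again
  return 0;
}
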
5033 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) { 5163 void LCodeGen::DoDeferredNumberTagD(LNumberTagD* instr) {
5034 // TODO(3095996): Get rid of this. For now, we need to make the 5164 // TODO(3095996): Get rid of this. For now, we need to make the
5035 // result register contain a valid pointer because it is already 5165 // result register contain a valid pointer because it is already
5036 // contained in the register pointer map. 5166 // contained in the register pointer map.
5037 Register reg = ToRegister(instr->result()); 5167 Register reg = ToRegister(instr->result());
5038 __ Set(reg, Immediate(0)); 5168 __ Set(reg, Immediate(0));
(...skipping 29 matching lines...)
5068 DeoptimizeIf(not_zero, instr->environment()); 5198 DeoptimizeIf(not_zero, instr->environment());
5069 } else { 5199 } else {
5070 __ AssertSmi(result); 5200 __ AssertSmi(result);
5071 } 5201 }
5072 __ SmiUntag(result); 5202 __ SmiUntag(result);
5073 } 5203 }
5074 5204
5075 5205
5076 void LCodeGen::EmitNumberUntagDNoSSE2(Register input_reg, 5206 void LCodeGen::EmitNumberUntagDNoSSE2(Register input_reg,
5077 Register temp_reg, 5207 Register temp_reg,
5208 X87Register res_reg,
5078 bool allow_undefined_as_nan, 5209 bool allow_undefined_as_nan,
5079 bool deoptimize_on_minus_zero, 5210 bool deoptimize_on_minus_zero,
5080 LEnvironment* env, 5211 LEnvironment* env,
5081 NumberUntagDMode mode) { 5212 NumberUntagDMode mode) {
5082 Label load_smi, done; 5213 Label load_smi, done;
5083 5214
5215 X87PrepareToWrite(res_reg);
5084 STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE > 5216 STATIC_ASSERT(NUMBER_CANDIDATE_IS_ANY_TAGGED_CONVERT_HOLE >
5085 NUMBER_CANDIDATE_IS_ANY_TAGGED); 5217 NUMBER_CANDIDATE_IS_ANY_TAGGED);
5086 if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) { 5218 if (mode >= NUMBER_CANDIDATE_IS_ANY_TAGGED) {
5087 // Smi check. 5219 // Smi check.
5088 __ JumpIfSmi(input_reg, &load_smi, Label::kNear); 5220 __ JumpIfSmi(input_reg, &load_smi, Label::kNear);
5089 5221
5090 // Heap number map check. 5222 // Heap number map check.
5091 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset), 5223 __ cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
5092 factory()->heap_number_map()); 5224 factory()->heap_number_map());
5093 if (!allow_undefined_as_nan) { 5225 if (!allow_undefined_as_nan) {
(...skipping 40 matching lines...)
5134 ASSERT(mode == NUMBER_CANDIDATE_IS_SMI); 5266 ASSERT(mode == NUMBER_CANDIDATE_IS_SMI);
5135 } 5267 }
5136 5268
5137 __ bind(&load_smi); 5269 __ bind(&load_smi);
5138 __ SmiUntag(input_reg); // Untag smi before converting to float. 5270 __ SmiUntag(input_reg); // Untag smi before converting to float.
5139 __ push(input_reg); 5271 __ push(input_reg);
5140 __ fild_s(Operand(esp, 0)); 5272 __ fild_s(Operand(esp, 0));
5141 __ pop(input_reg); 5273 __ pop(input_reg);
5142 __ SmiTag(input_reg); // Retag smi. 5274 __ SmiTag(input_reg); // Retag smi.
5143 __ bind(&done); 5275 __ bind(&done);
5276 X87CommitWrite(res_reg);
5144 } 5277 }
5145 5278
5146 5279
5147 void LCodeGen::EmitNumberUntagD(Register input_reg, 5280 void LCodeGen::EmitNumberUntagD(Register input_reg,
5148 Register temp_reg, 5281 Register temp_reg,
5149 XMMRegister result_reg, 5282 XMMRegister result_reg,
5150 bool allow_undefined_as_nan, 5283 bool allow_undefined_as_nan,
5151 bool deoptimize_on_minus_zero, 5284 bool deoptimize_on_minus_zero,
5152 LEnvironment* env, 5285 LEnvironment* env,
5153 NumberUntagDMode mode) { 5286 NumberUntagDMode mode) {
(...skipping 361 matching lines...)
5515 EmitNumberUntagD(input_reg, 5648 EmitNumberUntagD(input_reg,
5516 temp_reg, 5649 temp_reg,
5517 result_reg, 5650 result_reg,
5518 instr->hydrogen()->allow_undefined_as_nan(), 5651 instr->hydrogen()->allow_undefined_as_nan(),
5519 deoptimize_on_minus_zero, 5652 deoptimize_on_minus_zero,
5520 instr->environment(), 5653 instr->environment(),
5521 mode); 5654 mode);
5522 } else { 5655 } else {
5523 EmitNumberUntagDNoSSE2(input_reg, 5656 EmitNumberUntagDNoSSE2(input_reg,
5524 temp_reg, 5657 temp_reg,
5658 ToX87Register(instr->result()),
5525 instr->hydrogen()->allow_undefined_as_nan(), 5659 instr->hydrogen()->allow_undefined_as_nan(),
5526 deoptimize_on_minus_zero, 5660 deoptimize_on_minus_zero,
5527 instr->environment(), 5661 instr->environment(),
5528 mode); 5662 mode);
5529 CurrentInstructionReturnsX87Result();
5530 } 5663 }
5531 } 5664 }
5532 5665
5533 5666
5534 void LCodeGen::DoDoubleToI(LDoubleToI* instr) { 5667 void LCodeGen::DoDoubleToI(LDoubleToI* instr) {
5535 LOperand* input = instr->value(); 5668 LOperand* input = instr->value();
5536 ASSERT(input->IsDoubleRegister()); 5669 ASSERT(input->IsDoubleRegister());
5537 LOperand* result = instr->result(); 5670 LOperand* result = instr->result();
5538 ASSERT(result->IsRegister()); 5671 ASSERT(result->IsRegister());
5539 CpuFeatureScope scope(masm(), SSE2); 5672 CpuFeatureScope scope(masm(), SSE2);
(...skipping 1001 matching lines...)
6541 FixedArray::kHeaderSize - kPointerSize)); 6674 FixedArray::kHeaderSize - kPointerSize));
6542 __ bind(&done); 6675 __ bind(&done);
6543 } 6676 }
6544 6677
6545 6678
6546 #undef __ 6679 #undef __
6547 6680
6548 } } // namespace v8::internal 6681 } } // namespace v8::internal
6549 6682
6550 #endif // V8_TARGET_ARCH_IA32 6683 #endif // V8_TARGET_ARCH_IA32