Chromium Code Reviews

Side by Side Diff: src/x64/lithium-codegen-x64.cc

Issue 6928060: Merge Label and NearLabel (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: address comments (created 9 years, 7 months ago)
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 767 matching lines...)
778 778
779 void LCodeGen::DoModI(LModI* instr) { 779 void LCodeGen::DoModI(LModI* instr) {
780 if (instr->hydrogen()->HasPowerOf2Divisor()) { 780 if (instr->hydrogen()->HasPowerOf2Divisor()) {
781 Register dividend = ToRegister(instr->InputAt(0)); 781 Register dividend = ToRegister(instr->InputAt(0));
782 782
783 int32_t divisor = 783 int32_t divisor =
784 HConstant::cast(instr->hydrogen()->right())->Integer32Value(); 784 HConstant::cast(instr->hydrogen()->right())->Integer32Value();
785 785
786 if (divisor < 0) divisor = -divisor; 786 if (divisor < 0) divisor = -divisor;
787 787
788 NearLabel positive_dividend, done; 788 Label positive_dividend, done;
789 __ testl(dividend, dividend); 789 __ testl(dividend, dividend);
790 __ j(not_sign, &positive_dividend); 790 __ j(not_sign, &positive_dividend, Label::kNear);
791 __ negl(dividend); 791 __ negl(dividend);
792 __ andl(dividend, Immediate(divisor - 1)); 792 __ andl(dividend, Immediate(divisor - 1));
793 __ negl(dividend); 793 __ negl(dividend);
794 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 794 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
795 __ j(not_zero, &done); 795 __ j(not_zero, &done, Label::kNear);
796 DeoptimizeIf(no_condition, instr->environment()); 796 DeoptimizeIf(no_condition, instr->environment());
797 } 797 }
798 __ bind(&positive_dividend); 798 __ bind(&positive_dividend);
799 __ andl(dividend, Immediate(divisor - 1)); 799 __ andl(dividend, Immediate(divisor - 1));
800 __ bind(&done); 800 __ bind(&done);
801 } else { 801 } else {
802 NearLabel done, remainder_eq_dividend, slow, do_subtraction, both_positive; 802 Label done, remainder_eq_dividend, slow, do_subtraction, both_positive;
803 Register left_reg = ToRegister(instr->InputAt(0)); 803 Register left_reg = ToRegister(instr->InputAt(0));
804 Register right_reg = ToRegister(instr->InputAt(1)); 804 Register right_reg = ToRegister(instr->InputAt(1));
805 Register result_reg = ToRegister(instr->result()); 805 Register result_reg = ToRegister(instr->result());
806 806
807 ASSERT(left_reg.is(rax)); 807 ASSERT(left_reg.is(rax));
808 ASSERT(result_reg.is(rdx)); 808 ASSERT(result_reg.is(rdx));
809 ASSERT(!right_reg.is(rax)); 809 ASSERT(!right_reg.is(rax));
810 ASSERT(!right_reg.is(rdx)); 810 ASSERT(!right_reg.is(rdx));
811 811
812 // Check for x % 0. 812 // Check for x % 0.
813 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { 813 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
814 __ testl(right_reg, right_reg); 814 __ testl(right_reg, right_reg);
815 DeoptimizeIf(zero, instr->environment()); 815 DeoptimizeIf(zero, instr->environment());
816 } 816 }
817 817
818 __ testl(left_reg, left_reg); 818 __ testl(left_reg, left_reg);
819 __ j(zero, &remainder_eq_dividend); 819 __ j(zero, &remainder_eq_dividend, Label::kNear);
820 __ j(sign, &slow); 820 __ j(sign, &slow, Label::kNear);
821 821
822 __ testl(right_reg, right_reg); 822 __ testl(right_reg, right_reg);
823 __ j(not_sign, &both_positive); 823 __ j(not_sign, &both_positive, Label::kNear);
824 // The sign of the divisor doesn't matter. 824 // The sign of the divisor doesn't matter.
825 __ neg(right_reg); 825 __ neg(right_reg);
826 826
827 __ bind(&both_positive); 827 __ bind(&both_positive);
828 // If the dividend is smaller than the nonnegative 828 // If the dividend is smaller than the nonnegative
829 // divisor, the dividend is the result. 829 // divisor, the dividend is the result.
830 __ cmpl(left_reg, right_reg); 830 __ cmpl(left_reg, right_reg);
831 __ j(less, &remainder_eq_dividend); 831 __ j(less, &remainder_eq_dividend, Label::kNear);
832 832
833 // Check if the divisor is a PowerOfTwo integer. 833 // Check if the divisor is a PowerOfTwo integer.
834 Register scratch = ToRegister(instr->TempAt(0)); 834 Register scratch = ToRegister(instr->TempAt(0));
835 __ movl(scratch, right_reg); 835 __ movl(scratch, right_reg);
836 __ subl(scratch, Immediate(1)); 836 __ subl(scratch, Immediate(1));
837 __ testl(scratch, right_reg); 837 __ testl(scratch, right_reg);
838 __ j(not_zero, &do_subtraction); 838 __ j(not_zero, &do_subtraction, Label::kNear);
839 __ andl(left_reg, scratch); 839 __ andl(left_reg, scratch);
840 __ jmp(&remainder_eq_dividend); 840 __ jmp(&remainder_eq_dividend, Label::kNear);
841 841
842 __ bind(&do_subtraction); 842 __ bind(&do_subtraction);
843 const int kUnfolds = 3; 843 const int kUnfolds = 3;
844 // Try a few subtractions of the dividend. 844 // Try a few subtractions of the dividend.
845 __ movl(scratch, left_reg); 845 __ movl(scratch, left_reg);
846 for (int i = 0; i < kUnfolds; i++) { 846 for (int i = 0; i < kUnfolds; i++) {
847 // Reduce the dividend by the divisor. 847 // Reduce the dividend by the divisor.
848 __ subl(left_reg, right_reg); 848 __ subl(left_reg, right_reg);
849 // Check if the dividend is less than the divisor. 849 // Check if the dividend is less than the divisor.
850 __ cmpl(left_reg, right_reg); 850 __ cmpl(left_reg, right_reg);
851 __ j(less, &remainder_eq_dividend); 851 __ j(less, &remainder_eq_dividend, Label::kNear);
852 } 852 }
853 __ movl(left_reg, scratch); 853 __ movl(left_reg, scratch);
854 854
855 // Slow case, using idiv instruction. 855 // Slow case, using idiv instruction.
856 __ bind(&slow); 856 __ bind(&slow);
857 // Sign extend eax to edx. 857 // Sign extend eax to edx.
858 // (We are using only the low 32 bits of the values.) 858 // (We are using only the low 32 bits of the values.)
859 __ cdq(); 859 __ cdq();
860 860
861 // Check for (0 % -x) that will produce negative zero. 861 // Check for (0 % -x) that will produce negative zero.
862 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 862 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
863 NearLabel positive_left; 863 Label positive_left;
864 NearLabel done; 864 Label done;
865 __ testl(left_reg, left_reg); 865 __ testl(left_reg, left_reg);
866 __ j(not_sign, &positive_left); 866 __ j(not_sign, &positive_left, Label::kNear);
867 __ idivl(right_reg); 867 __ idivl(right_reg);
868 868
869 // Test the remainder for 0, because then the result would be -0. 869 // Test the remainder for 0, because then the result would be -0.
870 __ testl(result_reg, result_reg); 870 __ testl(result_reg, result_reg);
871 __ j(not_zero, &done); 871 __ j(not_zero, &done, Label::kNear);
872 872
873 DeoptimizeIf(no_condition, instr->environment()); 873 DeoptimizeIf(no_condition, instr->environment());
874 __ bind(&positive_left); 874 __ bind(&positive_left);
875 __ idivl(right_reg); 875 __ idivl(right_reg);
876 __ bind(&done); 876 __ bind(&done);
877 } else { 877 } else {
878 __ idivl(right_reg); 878 __ idivl(right_reg);
879 } 879 }
880 __ jmp(&done); 880 __ jmp(&done, Label::kNear);
881 881
882 __ bind(&remainder_eq_dividend); 882 __ bind(&remainder_eq_dividend);
883 __ movl(result_reg, left_reg); 883 __ movl(result_reg, left_reg);
884 884
885 __ bind(&done); 885 __ bind(&done);
886 } 886 }
887 } 887 }
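The power-of-two fast path above reduces the modulus to a mask of the dividend's magnitude, reapplying the dividend's sign and deoptimizing when a negative dividend would have to produce -0. A minimal C++ sketch of the value-level behaviour this code implements (function name and layout are illustrative, not from the V8 sources):

  #include <cassert>

  // Sketch of DoModI's fast path: remainder by a power-of-two divisor.
  // The generated code masks the magnitude and restores the dividend's
  // sign, which matches truncated (C-style) integer remainder.
  int ModPowerOfTwo(int dividend, int divisor) {
    if (divisor < 0) divisor = -divisor;       // sign of the divisor is irrelevant
    assert((divisor & (divisor - 1)) == 0);    // divisor must be a power of two
    if (dividend < 0) {
      int result = -((-dividend) & (divisor - 1));
      // With kBailoutOnMinusZero the generated code deoptimizes here when
      // result == 0, because optimized code cannot represent -0.
      return result;
    }
    return dividend & (divisor - 1);
  }

  int main() {
    assert(ModPowerOfTwo(7, 4) == 3);
    assert(ModPowerOfTwo(-7, 4) == -3);
    assert(ModPowerOfTwo(-8, -4) == 0);
    return 0;
  }

The general path that follows avoids idivl where it can: it returns the dividend directly when it is already smaller than the divisor, uses an and-mask when the divisor turns out to be a power of two at runtime, and tries a few unrolled subtractions (kUnfolds) before falling back to the slow idivl case.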
888 888
889 889
890 void LCodeGen::DoDivI(LDivI* instr) { 890 void LCodeGen::DoDivI(LDivI* instr) {
891 LOperand* right = instr->InputAt(1); 891 LOperand* right = instr->InputAt(1);
892 ASSERT(ToRegister(instr->result()).is(rax)); 892 ASSERT(ToRegister(instr->result()).is(rax));
893 ASSERT(ToRegister(instr->InputAt(0)).is(rax)); 893 ASSERT(ToRegister(instr->InputAt(0)).is(rax));
894 ASSERT(!ToRegister(instr->InputAt(1)).is(rax)); 894 ASSERT(!ToRegister(instr->InputAt(1)).is(rax));
895 ASSERT(!ToRegister(instr->InputAt(1)).is(rdx)); 895 ASSERT(!ToRegister(instr->InputAt(1)).is(rdx));
896 896
897 Register left_reg = rax; 897 Register left_reg = rax;
898 898
899 // Check for x / 0. 899 // Check for x / 0.
900 Register right_reg = ToRegister(right); 900 Register right_reg = ToRegister(right);
901 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) { 901 if (instr->hydrogen()->CheckFlag(HValue::kCanBeDivByZero)) {
902 __ testl(right_reg, right_reg); 902 __ testl(right_reg, right_reg);
903 DeoptimizeIf(zero, instr->environment()); 903 DeoptimizeIf(zero, instr->environment());
904 } 904 }
905 905
906 // Check for (0 / -x) that will produce negative zero. 906 // Check for (0 / -x) that will produce negative zero.
907 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 907 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
908 NearLabel left_not_zero; 908 Label left_not_zero;
909 __ testl(left_reg, left_reg); 909 __ testl(left_reg, left_reg);
910 __ j(not_zero, &left_not_zero); 910 __ j(not_zero, &left_not_zero, Label::kNear);
911 __ testl(right_reg, right_reg); 911 __ testl(right_reg, right_reg);
912 DeoptimizeIf(sign, instr->environment()); 912 DeoptimizeIf(sign, instr->environment());
913 __ bind(&left_not_zero); 913 __ bind(&left_not_zero);
914 } 914 }
915 915
916 // Check for (-kMinInt / -1). 916 // Check for (-kMinInt / -1).
917 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) { 917 if (instr->hydrogen()->CheckFlag(HValue::kCanOverflow)) {
918 NearLabel left_not_min_int; 918 Label left_not_min_int;
919 __ cmpl(left_reg, Immediate(kMinInt)); 919 __ cmpl(left_reg, Immediate(kMinInt));
920 __ j(not_zero, &left_not_min_int); 920 __ j(not_zero, &left_not_min_int, Label::kNear);
921 __ cmpl(right_reg, Immediate(-1)); 921 __ cmpl(right_reg, Immediate(-1));
922 DeoptimizeIf(zero, instr->environment()); 922 DeoptimizeIf(zero, instr->environment());
923 __ bind(&left_not_min_int); 923 __ bind(&left_not_min_int);
924 } 924 }
925 925
926 // Sign extend to rdx. 926 // Sign extend to rdx.
927 __ cdq(); 927 __ cdq();
928 __ idivl(right_reg); 928 __ idivl(right_reg);
929 929
930 // Deoptimize if remainder is not 0. 930 // Deoptimize if remainder is not 0.
(...skipping 58 matching lines...)
989 } else { 989 } else {
990 __ imull(left, ToRegister(right)); 990 __ imull(left, ToRegister(right));
991 } 991 }
992 992
993 if (can_overflow) { 993 if (can_overflow) {
994 DeoptimizeIf(overflow, instr->environment()); 994 DeoptimizeIf(overflow, instr->environment());
995 } 995 }
996 996
997 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 997 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
998 // Bail out if the result is supposed to be negative zero. 998 // Bail out if the result is supposed to be negative zero.
999 NearLabel done; 999 Label done;
1000 __ testl(left, left); 1000 __ testl(left, left);
1001 __ j(not_zero, &done); 1001 __ j(not_zero, &done, Label::kNear);
1002 if (right->IsConstantOperand()) { 1002 if (right->IsConstantOperand()) {
1003 if (ToInteger32(LConstantOperand::cast(right)) <= 0) { 1003 if (ToInteger32(LConstantOperand::cast(right)) <= 0) {
1004 DeoptimizeIf(no_condition, instr->environment()); 1004 DeoptimizeIf(no_condition, instr->environment());
1005 } 1005 }
1006 } else if (right->IsStackSlot()) { 1006 } else if (right->IsStackSlot()) {
1007 __ or_(kScratchRegister, ToOperand(right)); 1007 __ or_(kScratchRegister, ToOperand(right));
1008 DeoptimizeIf(sign, instr->environment()); 1008 DeoptimizeIf(sign, instr->environment());
1009 } else { 1009 } else {
1010 // Test the non-zero operand for negative sign. 1010 // Test the non-zero operand for negative sign.
1011 __ or_(kScratchRegister, ToRegister(right)); 1011 __ or_(kScratchRegister, ToRegister(right));
(...skipping 348 matching lines...)
1360 __ j(equal, false_label); 1360 __ j(equal, false_label);
1361 __ CompareRoot(reg, Heap::kTrueValueRootIndex); 1361 __ CompareRoot(reg, Heap::kTrueValueRootIndex);
1362 __ j(equal, true_label); 1362 __ j(equal, true_label);
1363 __ CompareRoot(reg, Heap::kFalseValueRootIndex); 1363 __ CompareRoot(reg, Heap::kFalseValueRootIndex);
1364 __ j(equal, false_label); 1364 __ j(equal, false_label);
1365 __ Cmp(reg, Smi::FromInt(0)); 1365 __ Cmp(reg, Smi::FromInt(0));
1366 __ j(equal, false_label); 1366 __ j(equal, false_label);
1367 __ JumpIfSmi(reg, true_label); 1367 __ JumpIfSmi(reg, true_label);
1368 1368
1369 // Test for double values. Plus/minus zero and NaN are false. 1369 // Test for double values. Plus/minus zero and NaN are false.
1370 NearLabel call_stub; 1370 Label call_stub;
1371 __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset), 1371 __ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
1372 Heap::kHeapNumberMapRootIndex); 1372 Heap::kHeapNumberMapRootIndex);
1373 __ j(not_equal, &call_stub); 1373 __ j(not_equal, &call_stub, Label::kNear);
1374 1374
1375 // HeapNumber => false iff +0, -0, or NaN. These three cases set the 1375 // HeapNumber => false iff +0, -0, or NaN. These three cases set the
1376 // zero flag when compared to zero using ucomisd. 1376 // zero flag when compared to zero using ucomisd.
1377 __ xorps(xmm0, xmm0); 1377 __ xorps(xmm0, xmm0);
1378 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); 1378 __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
1379 __ j(zero, false_label); 1379 __ j(zero, false_label);
1380 __ jmp(true_label); 1380 __ jmp(true_label);
1381 1381
1382 // The conversion stub doesn't cause garbage collections so it's 1382 // The conversion stub doesn't cause garbage collections so it's
1383 // safe to not record a safepoint after the call. 1383 // safe to not record a safepoint after the call.
(...skipping 93 matching lines...)
1477 __ cmpl(ToRegister(left), ToOperand(right)); 1477 __ cmpl(ToRegister(left), ToOperand(right));
1478 } 1478 }
1479 } 1479 }
1480 1480
1481 1481
1482 void LCodeGen::DoCmpID(LCmpID* instr) { 1482 void LCodeGen::DoCmpID(LCmpID* instr) {
1483 LOperand* left = instr->InputAt(0); 1483 LOperand* left = instr->InputAt(0);
1484 LOperand* right = instr->InputAt(1); 1484 LOperand* right = instr->InputAt(1);
1485 LOperand* result = instr->result(); 1485 LOperand* result = instr->result();
1486 1486
1487 NearLabel unordered; 1487 Label unordered;
1488 if (instr->is_double()) { 1488 if (instr->is_double()) {
1489 // Don't base result on EFLAGS when a NaN is involved. Instead 1489 // Don't base result on EFLAGS when a NaN is involved. Instead
1490 // jump to the unordered case, which produces a false value. 1490 // jump to the unordered case, which produces a false value.
1491 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); 1491 __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right));
1492 __ j(parity_even, &unordered); 1492 __ j(parity_even, &unordered, Label::kNear);
1493 } else { 1493 } else {
1494 EmitCmpI(left, right); 1494 EmitCmpI(left, right);
1495 } 1495 }
1496 1496
1497 NearLabel done; 1497 Label done;
1498 Condition cc = TokenToCondition(instr->op(), instr->is_double()); 1498 Condition cc = TokenToCondition(instr->op(), instr->is_double());
1499 __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex); 1499 __ LoadRoot(ToRegister(result), Heap::kTrueValueRootIndex);
1500 __ j(cc, &done); 1500 __ j(cc, &done, Label::kNear);
1501 1501
1502 __ bind(&unordered); 1502 __ bind(&unordered);
1503 __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex); 1503 __ LoadRoot(ToRegister(result), Heap::kFalseValueRootIndex);
1504 __ bind(&done); 1504 __ bind(&done);
1505 } 1505 }
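The parity_even branch above exists because ucomisd reports NaN operands as "unordered" through the parity flag; a comparison involving NaN must produce false rather than whatever the other flags happen to say. A small C++ sketch of that semantics (helper names are illustrative):

  #include <cassert>
  #include <cmath>

  // Sketch of DoCmpID's NaN handling: an unordered comparison (either
  // operand is NaN) always yields false, regardless of the operator.
  bool CompareDoubles(double left, double right, bool (*op)(double, double)) {
    if (std::isnan(left) || std::isnan(right)) return false;  // unordered case
    return op(left, right);
  }

  bool Less(double a, double b) { return a < b; }

  int main() {
    assert(CompareDoubles(1.0, 2.0, Less));
    assert(!CompareDoubles(std::nan(""), 2.0, Less));
    assert(!CompareDoubles(2.0, std::nan(""), Less));
    return 0;
  }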
1506 1506
1507 1507
1508 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) { 1508 void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) {
1509 LOperand* left = instr->InputAt(0); 1509 LOperand* left = instr->InputAt(0);
1510 LOperand* right = instr->InputAt(1); 1510 LOperand* right = instr->InputAt(1);
(...skipping 12 matching lines...)
1523 Condition cc = TokenToCondition(instr->op(), instr->is_double()); 1523 Condition cc = TokenToCondition(instr->op(), instr->is_double());
1524 EmitBranch(true_block, false_block, cc); 1524 EmitBranch(true_block, false_block, cc);
1525 } 1525 }
1526 1526
1527 1527
1528 void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) { 1528 void LCodeGen::DoCmpJSObjectEq(LCmpJSObjectEq* instr) {
1529 Register left = ToRegister(instr->InputAt(0)); 1529 Register left = ToRegister(instr->InputAt(0));
1530 Register right = ToRegister(instr->InputAt(1)); 1530 Register right = ToRegister(instr->InputAt(1));
1531 Register result = ToRegister(instr->result()); 1531 Register result = ToRegister(instr->result());
1532 1532
1533 NearLabel different, done; 1533 Label different, done;
1534 __ cmpq(left, right); 1534 __ cmpq(left, right);
1535 __ j(not_equal, &different); 1535 __ j(not_equal, &different, Label::kNear);
1536 __ LoadRoot(result, Heap::kTrueValueRootIndex); 1536 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1537 __ jmp(&done); 1537 __ jmp(&done, Label::kNear);
1538 __ bind(&different); 1538 __ bind(&different);
1539 __ LoadRoot(result, Heap::kFalseValueRootIndex); 1539 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1540 __ bind(&done); 1540 __ bind(&done);
1541 } 1541 }
1542 1542
1543 1543
1544 void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) { 1544 void LCodeGen::DoCmpJSObjectEqAndBranch(LCmpJSObjectEqAndBranch* instr) {
1545 Register left = ToRegister(instr->InputAt(0)); 1545 Register left = ToRegister(instr->InputAt(0));
1546 Register right = ToRegister(instr->InputAt(1)); 1546 Register right = ToRegister(instr->InputAt(1));
1547 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1547 int false_block = chunk_->LookupDestination(instr->false_block_id());
(...skipping 13 matching lines...)
1561 // Consider adding other type and representation tests too. 1561 // Consider adding other type and representation tests too.
1562 if (instr->hydrogen()->value()->type().IsSmi()) { 1562 if (instr->hydrogen()->value()->type().IsSmi()) {
1563 __ LoadRoot(result, Heap::kFalseValueRootIndex); 1563 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1564 return; 1564 return;
1565 } 1565 }
1566 1566
1567 __ CompareRoot(reg, Heap::kNullValueRootIndex); 1567 __ CompareRoot(reg, Heap::kNullValueRootIndex);
1568 if (instr->is_strict()) { 1568 if (instr->is_strict()) {
1569 ASSERT(Heap::kTrueValueRootIndex >= 0); 1569 ASSERT(Heap::kTrueValueRootIndex >= 0);
1570 __ movl(result, Immediate(Heap::kTrueValueRootIndex)); 1570 __ movl(result, Immediate(Heap::kTrueValueRootIndex));
1571 NearLabel load; 1571 Label load;
1572 __ j(equal, &load); 1572 __ j(equal, &load, Label::kNear);
1573 __ Set(result, Heap::kFalseValueRootIndex); 1573 __ Set(result, Heap::kFalseValueRootIndex);
1574 __ bind(&load); 1574 __ bind(&load);
1575 __ LoadRootIndexed(result, result, 0); 1575 __ LoadRootIndexed(result, result, 0);
1576 } else { 1576 } else {
1577 NearLabel true_value, false_value, done; 1577 NearLabel false_value;
1578 __ j(equal, &true_value); 1578 Label true_value, done;
1579 __ j(equal, &true_value, Label::kNear);
1579 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); 1580 __ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
1580 __ j(equal, &true_value); 1581 __ j(equal, &true_value, Label::kNear);
1581 __ JumpIfSmi(reg, &false_value); 1582 __ JumpIfSmi(reg, &false_value);
1582 // Check for undetectable objects by looking in the bit field in 1583 // Check for undetectable objects by looking in the bit field in
1583 // the map. The object has already been smi checked. 1584 // the map. The object has already been smi checked.
1584 Register scratch = result; 1585 Register scratch = result;
1585 __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset)); 1586 __ movq(scratch, FieldOperand(reg, HeapObject::kMapOffset));
1586 __ testb(FieldOperand(scratch, Map::kBitFieldOffset), 1587 __ testb(FieldOperand(scratch, Map::kBitFieldOffset),
1587 Immediate(1 << Map::kIsUndetectable)); 1588 Immediate(1 << Map::kIsUndetectable));
1588 __ j(not_zero, &true_value); 1589 __ j(not_zero, &true_value, Label::kNear);
1589 __ bind(&false_value); 1590 __ bind(&false_value);
1590 __ LoadRoot(result, Heap::kFalseValueRootIndex); 1591 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1591 __ jmp(&done); 1592 __ jmp(&done, Label::kNear);
1592 __ bind(&true_value); 1593 __ bind(&true_value);
1593 __ LoadRoot(result, Heap::kTrueValueRootIndex); 1594 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1594 __ bind(&done); 1595 __ bind(&done);
1595 } 1596 }
1596 } 1597 }
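The sequence above implements both flavours of the null test: the strict form accepts only null itself, while the non-strict form also accepts undefined and "undetectable" objects (objects whose map has the kIsUndetectable bit set). A hedged C++ sketch of the decision logic; the enum and helper below are illustrative stand-ins, not V8 types:

  #include <cassert>

  // Illustrative value kinds for the sketch only.
  enum Kind { kNull, kUndefined, kSmi, kUndetectableObject, kOtherObject };

  // Sketch of DoIsNull: 'value === null' checks identity with null only;
  // 'value == null' additionally matches undefined and undetectable objects.
  bool IsNull(Kind value, bool is_strict) {
    if (is_strict) return value == kNull;
    if (value == kNull || value == kUndefined) return true;
    if (value == kSmi) return false;
    return value == kUndetectableObject;
  }

  int main() {
    assert(IsNull(kNull, true));
    assert(!IsNull(kUndefined, true));
    assert(IsNull(kUndefined, false));
    assert(IsNull(kUndetectableObject, false));
    assert(!IsNull(kOtherObject, false));
    return 0;
  }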
1597 1598
1598 1599
1599 void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) { 1600 void LCodeGen::DoIsNullAndBranch(LIsNullAndBranch* instr) {
1600 Register reg = ToRegister(instr->InputAt(0)); 1601 Register reg = ToRegister(instr->InputAt(0));
1601 1602
(...skipping 140 matching lines...)
1742 return equal; 1743 return equal;
1743 } 1744 }
1744 1745
1745 1746
1746 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) { 1747 void LCodeGen::DoHasInstanceType(LHasInstanceType* instr) {
1747 Register input = ToRegister(instr->InputAt(0)); 1748 Register input = ToRegister(instr->InputAt(0));
1748 Register result = ToRegister(instr->result()); 1749 Register result = ToRegister(instr->result());
1749 1750
1750 ASSERT(instr->hydrogen()->value()->representation().IsTagged()); 1751 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1751 __ testl(input, Immediate(kSmiTagMask)); 1752 __ testl(input, Immediate(kSmiTagMask));
1752 NearLabel done, is_false; 1753 Label done, is_false;
1753 __ j(zero, &is_false); 1754 __ j(zero, &is_false);
1754 __ CmpObjectType(input, TestType(instr->hydrogen()), result); 1755 __ CmpObjectType(input, TestType(instr->hydrogen()), result);
1755 __ j(NegateCondition(BranchCondition(instr->hydrogen())), &is_false); 1756 __ j(NegateCondition(BranchCondition(instr->hydrogen())),
1757 &is_false, Label::kNear);
1756 __ LoadRoot(result, Heap::kTrueValueRootIndex); 1758 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1757 __ jmp(&done); 1759 __ jmp(&done, Label::kNear);
1758 __ bind(&is_false); 1760 __ bind(&is_false);
1759 __ LoadRoot(result, Heap::kFalseValueRootIndex); 1761 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1760 __ bind(&done); 1762 __ bind(&done);
1761 } 1763 }
1762 1764
1763 1765
1764 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) { 1766 void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) {
1765 Register input = ToRegister(instr->InputAt(0)); 1767 Register input = ToRegister(instr->InputAt(0));
1766 1768
1767 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1769 int true_block = chunk_->LookupDestination(instr->true_block_id());
(...skipping 23 matching lines...)
1791 1793
1792 1794
1793 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) { 1795 void LCodeGen::DoHasCachedArrayIndex(LHasCachedArrayIndex* instr) {
1794 Register input = ToRegister(instr->InputAt(0)); 1796 Register input = ToRegister(instr->InputAt(0));
1795 Register result = ToRegister(instr->result()); 1797 Register result = ToRegister(instr->result());
1796 1798
1797 ASSERT(instr->hydrogen()->value()->representation().IsTagged()); 1799 ASSERT(instr->hydrogen()->value()->representation().IsTagged());
1798 __ LoadRoot(result, Heap::kTrueValueRootIndex); 1800 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1799 __ testl(FieldOperand(input, String::kHashFieldOffset), 1801 __ testl(FieldOperand(input, String::kHashFieldOffset),
1800 Immediate(String::kContainsCachedArrayIndexMask)); 1802 Immediate(String::kContainsCachedArrayIndexMask));
1801 NearLabel done; 1803 Label done;
1802 __ j(zero, &done); 1804 __ j(zero, &done, Label::kNear);
1803 __ LoadRoot(result, Heap::kFalseValueRootIndex); 1805 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1804 __ bind(&done); 1806 __ bind(&done);
1805 } 1807 }
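Note the inverted sense of the test above: the result starts out as true and is only replaced with false when the masked hash bits are non-zero, i.e. a cached array index is present exactly when the masked bits are all zero. A one-function sketch, with the mask passed in rather than hard-coded (the real constant is String::kContainsCachedArrayIndexMask):

  #include <cassert>
  #include <cstdint>

  // Sketch of DoHasCachedArrayIndex: true iff the masked bits of the
  // string's hash field are all zero.
  bool HasCachedArrayIndex(uint32_t hash_field, uint32_t contains_cached_index_mask) {
    return (hash_field & contains_cached_index_mask) == 0;
  }

  int main() {
    assert(HasCachedArrayIndex(0x0, 0x2));
    assert(!HasCachedArrayIndex(0x2, 0x2));
    return 0;
  }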
1806 1808
1807 1809
1808 void LCodeGen::DoHasCachedArrayIndexAndBranch( 1810 void LCodeGen::DoHasCachedArrayIndexAndBranch(
1809 LHasCachedArrayIndexAndBranch* instr) { 1811 LHasCachedArrayIndexAndBranch* instr) {
1810 Register input = ToRegister(instr->InputAt(0)); 1812 Register input = ToRegister(instr->InputAt(0));
1811 1813
1812 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1814 int true_block = chunk_->LookupDestination(instr->true_block_id());
(...skipping 58 matching lines...)
1871 // End with the answer in the z flag. 1873 // End with the answer in the z flag.
1872 } 1874 }
1873 1875
1874 1876
1875 void LCodeGen::DoClassOfTest(LClassOfTest* instr) { 1877 void LCodeGen::DoClassOfTest(LClassOfTest* instr) {
1876 Register input = ToRegister(instr->InputAt(0)); 1878 Register input = ToRegister(instr->InputAt(0));
1877 Register result = ToRegister(instr->result()); 1879 Register result = ToRegister(instr->result());
1878 ASSERT(input.is(result)); 1880 ASSERT(input.is(result));
1879 Register temp = ToRegister(instr->TempAt(0)); 1881 Register temp = ToRegister(instr->TempAt(0));
1880 Handle<String> class_name = instr->hydrogen()->class_name(); 1882 Handle<String> class_name = instr->hydrogen()->class_name();
1881 NearLabel done; 1883 Label done;
1882 Label is_true, is_false; 1884 Label is_true, is_false;
1883 1885
1884 EmitClassOfTest(&is_true, &is_false, class_name, input, temp); 1886 EmitClassOfTest(&is_true, &is_false, class_name, input, temp);
1885 1887
1886 __ j(not_equal, &is_false); 1888 __ j(not_equal, &is_false);
1887 1889
1888 __ bind(&is_true); 1890 __ bind(&is_true);
1889 __ LoadRoot(result, Heap::kTrueValueRootIndex); 1891 __ LoadRoot(result, Heap::kTrueValueRootIndex);
1890 __ jmp(&done); 1892 __ jmp(&done, Label::kNear);
1891 1893
1892 __ bind(&is_false); 1894 __ bind(&is_false);
1893 __ LoadRoot(result, Heap::kFalseValueRootIndex); 1895 __ LoadRoot(result, Heap::kFalseValueRootIndex);
1894 __ bind(&done); 1896 __ bind(&done);
1895 } 1897 }
1896 1898
1897 1899
1898 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) { 1900 void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) {
1899 Register input = ToRegister(instr->InputAt(0)); 1901 Register input = ToRegister(instr->InputAt(0));
1900 Register temp = ToRegister(instr->TempAt(0)); 1902 Register temp = ToRegister(instr->TempAt(0));
(...skipping 19 matching lines...)
1920 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); 1922 __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map());
1921 EmitBranch(true_block, false_block, equal); 1923 EmitBranch(true_block, false_block, equal);
1922 } 1924 }
1923 1925
1924 1926
1925 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { 1927 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
1926 InstanceofStub stub(InstanceofStub::kNoFlags); 1928 InstanceofStub stub(InstanceofStub::kNoFlags);
1927 __ push(ToRegister(instr->InputAt(0))); 1929 __ push(ToRegister(instr->InputAt(0)));
1928 __ push(ToRegister(instr->InputAt(1))); 1930 __ push(ToRegister(instr->InputAt(1)));
1929 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 1931 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
1930 NearLabel true_value, done; 1932 Label true_value, done;
1931 __ testq(rax, rax); 1933 __ testq(rax, rax);
1932 __ j(zero, &true_value); 1934 __ j(zero, &true_value, Label::kNear);
1933 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); 1935 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
1934 __ jmp(&done); 1936 __ jmp(&done, Label::kNear);
1935 __ bind(&true_value); 1937 __ bind(&true_value);
1936 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); 1938 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
1937 __ bind(&done); 1939 __ bind(&done);
1938 } 1940 }
1939 1941
1940 1942
1941 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) { 1943 void LCodeGen::DoInstanceOfAndBranch(LInstanceOfAndBranch* instr) {
1942 int true_block = chunk_->LookupDestination(instr->true_block_id()); 1944 int true_block = chunk_->LookupDestination(instr->true_block_id());
1943 int false_block = chunk_->LookupDestination(instr->false_block_id()); 1945 int false_block = chunk_->LookupDestination(instr->false_block_id());
1944 1946
(...skipping 29 matching lines...)
1974 1976
1975 Label done, false_result; 1977 Label done, false_result;
1976 Register object = ToRegister(instr->InputAt(0)); 1978 Register object = ToRegister(instr->InputAt(0));
1977 1979
1978 // A Smi is not an instance of anything. 1980 // A Smi is not an instance of anything.
1979 __ JumpIfSmi(object, &false_result); 1981 __ JumpIfSmi(object, &false_result);
1980 1982
1981 // This is the inlined call site instanceof cache. The two occurences of the 1983 // This is the inlined call site instanceof cache. The two occurences of the
1982 // hole value will be patched to the last map/result pair generated by the 1984 // hole value will be patched to the last map/result pair generated by the
1983 // instanceof stub. 1985 // instanceof stub.
1984 NearLabel cache_miss; 1986 Label cache_miss;
1985 // Use a temp register to avoid memory operands with variable lengths. 1987 // Use a temp register to avoid memory operands with variable lengths.
1986 Register map = ToRegister(instr->TempAt(0)); 1988 Register map = ToRegister(instr->TempAt(0));
1987 __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); 1989 __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
1988 __ bind(deferred->map_check()); // Label for calculating code patching. 1990 __ bind(deferred->map_check()); // Label for calculating code patching.
1989 __ movq(kScratchRegister, factory()->the_hole_value(), 1991 __ movq(kScratchRegister, factory()->the_hole_value(),
1990 RelocInfo::EMBEDDED_OBJECT); 1992 RelocInfo::EMBEDDED_OBJECT);
1991 __ cmpq(map, kScratchRegister); // Patched to cached map. 1993 __ cmpq(map, kScratchRegister); // Patched to cached map.
1992 __ j(not_equal, &cache_miss); 1994 __ j(not_equal, &cache_miss, Label::kNear);
1993 // Patched to load either true or false. 1995 // Patched to load either true or false.
1994 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex); 1996 __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
1995 #ifdef DEBUG 1997 #ifdef DEBUG
1996 // Check that the code size between patch label and patch sites is invariant. 1998 // Check that the code size between patch label and patch sites is invariant.
1997 Label end_of_patched_code; 1999 Label end_of_patched_code;
1998 __ bind(&end_of_patched_code); 2000 __ bind(&end_of_patched_code);
1999 ASSERT(true); 2001 ASSERT(true);
2000 #endif 2002 #endif
2001 __ jmp(&done); 2003 __ jmp(&done);
2002 2004
2003 // The inlined call site cache did not match. Check for null and string 2005 // The inlined call site cache did not match. Check for null and string
2004 // before calling the deferred code. 2006 // before calling the deferred code.
2005 __ bind(&cache_miss); // Null is not an instance of anything. 2007 __ bind(&cache_miss); // Null is not an instance of anything.
2006 __ CompareRoot(object, Heap::kNullValueRootIndex); 2008 __ CompareRoot(object, Heap::kNullValueRootIndex);
2007 __ j(equal, &false_result); 2009 __ j(equal, &false_result, Label::kNear);
2008 2010
2009 // String values are not instances of anything. 2011 // String values are not instances of anything.
2010 __ JumpIfNotString(object, kScratchRegister, deferred->entry()); 2012 __ JumpIfNotString(object, kScratchRegister, deferred->entry());
2011 2013
2012 __ bind(&false_result); 2014 __ bind(&false_result);
2013 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); 2015 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
2014 2016
2015 __ bind(deferred->exit()); 2017 __ bind(deferred->exit());
2016 __ bind(&done); 2018 __ bind(&done);
2017 } 2019 }
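The deferred instanceof path above relies on a per-call-site cache: the two hole values embedded near the map check are later patched to the last (map, result) pair the instanceof stub produced, so repeating the check against the same map answers without another call. A conceptual C++ sketch of that caching idea; the types and the SlowInstanceOf helper are illustrative, not V8's real mechanism of patching code objects:

  #include <cassert>

  struct Map {};  // stands in for a hidden class

  // Conceptual model of the patched call site: it remembers the last map
  // seen and the result the stub computed for it.
  struct InstanceOfCallSiteCache {
    const Map* cached_map = nullptr;  // patched "hole" value #1
    bool cached_result = false;       // patched "hole" value #2
  };

  // Illustrative stand-in for calling the InstanceofStub.
  bool SlowInstanceOf(const Map* map) { return map != nullptr; }

  bool InstanceOfWithCache(InstanceOfCallSiteCache* cache, const Map* map) {
    if (cache->cached_map == map) return cache->cached_result;  // cache hit
    bool result = SlowInstanceOf(map);                          // miss: call the stub
    cache->cached_map = map;                                    // "patch" the call site
    cache->cached_result = result;
    return result;
  }

  int main() {
    InstanceOfCallSiteCache cache;
    Map m;
    assert(InstanceOfWithCache(&cache, &m));  // miss, then cached
    assert(InstanceOfWithCache(&cache, &m));  // hit
    return 0;
  }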
(...skipping 46 matching lines...)
2064 void LCodeGen::DoCmpT(LCmpT* instr) { 2066 void LCodeGen::DoCmpT(LCmpT* instr) {
2065 Token::Value op = instr->op(); 2067 Token::Value op = instr->op();
2066 2068
2067 Handle<Code> ic = CompareIC::GetUninitialized(op); 2069 Handle<Code> ic = CompareIC::GetUninitialized(op);
2068 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2070 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2069 2071
2070 Condition condition = TokenToCondition(op, false); 2072 Condition condition = TokenToCondition(op, false);
2071 if (op == Token::GT || op == Token::LTE) { 2073 if (op == Token::GT || op == Token::LTE) {
2072 condition = ReverseCondition(condition); 2074 condition = ReverseCondition(condition);
2073 } 2075 }
2074 NearLabel true_value, done; 2076 Label true_value, done;
2075 __ testq(rax, rax); 2077 __ testq(rax, rax);
2076 __ j(condition, &true_value); 2078 __ j(condition, &true_value, Label::kNear);
2077 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex); 2079 __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
2078 __ jmp(&done); 2080 __ jmp(&done, Label::kNear);
2079 __ bind(&true_value); 2081 __ bind(&true_value);
2080 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex); 2082 __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
2081 __ bind(&done); 2083 __ bind(&done);
2082 } 2084 }
2083 2085
2084 2086
2085 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) { 2087 void LCodeGen::DoCmpTAndBranch(LCmpTAndBranch* instr) {
2086 Token::Value op = instr->op(); 2088 Token::Value op = instr->op();
2087 int true_block = chunk_->LookupDestination(instr->true_block_id()); 2089 int true_block = chunk_->LookupDestination(instr->true_block_id());
2088 int false_block = chunk_->LookupDestination(instr->false_block_id()); 2090 int false_block = chunk_->LookupDestination(instr->false_block_id());
(...skipping 152 matching lines...)
2241 2243
2242 int map_count = instr->hydrogen()->types()->length(); 2244 int map_count = instr->hydrogen()->types()->length();
2243 Handle<String> name = instr->hydrogen()->name(); 2245 Handle<String> name = instr->hydrogen()->name();
2244 2246
2245 if (map_count == 0) { 2247 if (map_count == 0) {
2246 ASSERT(instr->hydrogen()->need_generic()); 2248 ASSERT(instr->hydrogen()->need_generic());
2247 __ Move(rcx, instr->hydrogen()->name()); 2249 __ Move(rcx, instr->hydrogen()->name());
2248 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2250 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2249 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2251 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2250 } else { 2252 } else {
2251 NearLabel done; 2253 Label done;
2252 for (int i = 0; i < map_count - 1; ++i) { 2254 for (int i = 0; i < map_count - 1; ++i) {
2253 Handle<Map> map = instr->hydrogen()->types()->at(i); 2255 Handle<Map> map = instr->hydrogen()->types()->at(i);
2254 NearLabel next; 2256 Label next;
2255 __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map); 2257 __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2256 __ j(not_equal, &next); 2258 __ j(not_equal, &next, Label::kNear);
2257 EmitLoadFieldOrConstantFunction(result, object, map, name); 2259 EmitLoadFieldOrConstantFunction(result, object, map, name);
2258 __ jmp(&done); 2260 __ jmp(&done, Label::kNear);
2259 __ bind(&next); 2261 __ bind(&next);
2260 } 2262 }
2261 Handle<Map> map = instr->hydrogen()->types()->last(); 2263 Handle<Map> map = instr->hydrogen()->types()->last();
2262 __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map); 2264 __ Cmp(FieldOperand(object, HeapObject::kMapOffset), map);
2263 if (instr->hydrogen()->need_generic()) { 2265 if (instr->hydrogen()->need_generic()) {
2264 NearLabel generic; 2266 Label generic;
2265 __ j(not_equal, &generic); 2267 __ j(not_equal, &generic, Label::kNear);
2266 EmitLoadFieldOrConstantFunction(result, object, map, name); 2268 EmitLoadFieldOrConstantFunction(result, object, map, name);
2267 __ jmp(&done); 2269 __ jmp(&done, Label::kNear);
2268 __ bind(&generic); 2270 __ bind(&generic);
2269 __ Move(rcx, instr->hydrogen()->name()); 2271 __ Move(rcx, instr->hydrogen()->name());
2270 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize(); 2272 Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
2271 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2273 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2272 } else { 2274 } else {
2273 DeoptimizeIf(not_equal, instr->environment()); 2275 DeoptimizeIf(not_equal, instr->environment());
2274 EmitLoadFieldOrConstantFunction(result, object, map, name); 2276 EmitLoadFieldOrConstantFunction(result, object, map, name);
2275 } 2277 }
2276 __ bind(&done); 2278 __ bind(&done);
2277 } 2279 }
(...skipping 12 matching lines...)
2290 2292
2291 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) { 2293 void LCodeGen::DoLoadFunctionPrototype(LLoadFunctionPrototype* instr) {
2292 Register function = ToRegister(instr->function()); 2294 Register function = ToRegister(instr->function());
2293 Register result = ToRegister(instr->result()); 2295 Register result = ToRegister(instr->result());
2294 2296
2295 // Check that the function really is a function. 2297 // Check that the function really is a function.
2296 __ CmpObjectType(function, JS_FUNCTION_TYPE, result); 2298 __ CmpObjectType(function, JS_FUNCTION_TYPE, result);
2297 DeoptimizeIf(not_equal, instr->environment()); 2299 DeoptimizeIf(not_equal, instr->environment());
2298 2300
2299 // Check whether the function has an instance prototype. 2301 // Check whether the function has an instance prototype.
2300 NearLabel non_instance; 2302 Label non_instance;
2301 __ testb(FieldOperand(result, Map::kBitFieldOffset), 2303 __ testb(FieldOperand(result, Map::kBitFieldOffset),
2302 Immediate(1 << Map::kHasNonInstancePrototype)); 2304 Immediate(1 << Map::kHasNonInstancePrototype));
2303 __ j(not_zero, &non_instance); 2305 __ j(not_zero, &non_instance, Label::kNear);
2304 2306
2305 // Get the prototype or initial map from the function. 2307 // Get the prototype or initial map from the function.
2306 __ movq(result, 2308 __ movq(result,
2307 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset)); 2309 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2308 2310
2309 // Check that the function has a prototype or an initial map. 2311 // Check that the function has a prototype or an initial map.
2310 __ CompareRoot(result, Heap::kTheHoleValueRootIndex); 2312 __ CompareRoot(result, Heap::kTheHoleValueRootIndex);
2311 DeoptimizeIf(equal, instr->environment()); 2313 DeoptimizeIf(equal, instr->environment());
2312 2314
2313 // If the function does not have an initial map, we're done. 2315 // If the function does not have an initial map, we're done.
2314 NearLabel done; 2316 Label done;
2315 __ CmpObjectType(result, MAP_TYPE, kScratchRegister); 2317 __ CmpObjectType(result, MAP_TYPE, kScratchRegister);
2316 __ j(not_equal, &done); 2318 __ j(not_equal, &done, Label::kNear);
2317 2319
2318 // Get the prototype from the initial map. 2320 // Get the prototype from the initial map.
2319 __ movq(result, FieldOperand(result, Map::kPrototypeOffset)); 2321 __ movq(result, FieldOperand(result, Map::kPrototypeOffset));
2320 __ jmp(&done); 2322 __ jmp(&done, Label::kNear);
2321 2323
2322 // Non-instance prototype: Fetch prototype from constructor field 2324 // Non-instance prototype: Fetch prototype from constructor field
2323 // in the function's map. 2325 // in the function's map.
2324 __ bind(&non_instance); 2326 __ bind(&non_instance);
2325 __ movq(result, FieldOperand(result, Map::kConstructorOffset)); 2327 __ movq(result, FieldOperand(result, Map::kConstructorOffset));
2326 2328
2327 // All done. 2329 // All done.
2328 __ bind(&done); 2330 __ bind(&done);
2329 } 2331 }
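The control flow above distinguishes three cases when fetching a function's prototype: a map flagged with kHasNonInstancePrototype supplies the prototype from its constructor field, a prototype-or-initial-map slot that holds a map yields that map's prototype, and otherwise the slot already holds the prototype itself (the hole, meaning no prototype, deoptimizes). A hedged sketch of the same decision tree; all types and fields below are illustrative, not V8's real object layout:

  #include <cassert>

  // Illustrative object model for the sketch only.
  struct HeapObject { bool is_map = false; };
  struct Map : HeapObject {
    Map() { is_map = true; }
    HeapObject* prototype = nullptr;    // stands in for Map::kPrototypeOffset
    HeapObject* constructor = nullptr;  // stands in for Map::kConstructorOffset
  };
  struct JSFunction : HeapObject {
    Map* own_map = nullptr;                          // the function's map
    bool has_non_instance_prototype = false;         // bit field in that map
    HeapObject* prototype_or_initial_map = nullptr;
  };

  // Sketch of DoLoadFunctionPrototype's decision tree (the hole/deopt case
  // is omitted).
  HeapObject* LoadFunctionPrototype(const JSFunction& fn) {
    if (fn.has_non_instance_prototype) {
      // Non-instance prototype: fetch it from the constructor field of the map.
      return fn.own_map->constructor;
    }
    HeapObject* result = fn.prototype_or_initial_map;
    if (result->is_map) {
      // The slot holds the initial map: the prototype lives inside it.
      return static_cast<Map*>(result)->prototype;
    }
    // The slot already holds the prototype object.
    return result;
  }

  int main() {
    Map initial_map;
    HeapObject proto;
    initial_map.prototype = &proto;
    JSFunction fn;
    fn.prototype_or_initial_map = &initial_map;
    assert(LoadFunctionPrototype(fn) == &proto);
    return 0;
  }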
2330 2332
2331 2333
2332 void LCodeGen::DoLoadElements(LLoadElements* instr) { 2334 void LCodeGen::DoLoadElements(LLoadElements* instr) {
2333 Register result = ToRegister(instr->result()); 2335 Register result = ToRegister(instr->result());
2334 Register input = ToRegister(instr->InputAt(0)); 2336 Register input = ToRegister(instr->InputAt(0));
2335 __ movq(result, FieldOperand(input, JSObject::kElementsOffset)); 2337 __ movq(result, FieldOperand(input, JSObject::kElementsOffset));
2336 if (FLAG_debug_code) { 2338 if (FLAG_debug_code) {
2337 NearLabel done; 2339 Label done;
2338 __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset), 2340 __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
2339 Heap::kFixedArrayMapRootIndex); 2341 Heap::kFixedArrayMapRootIndex);
2340 __ j(equal, &done); 2342 __ j(equal, &done, Label::kNear);
2341 __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset), 2343 __ CompareRoot(FieldOperand(result, HeapObject::kMapOffset),
2342 Heap::kFixedCOWArrayMapRootIndex); 2344 Heap::kFixedCOWArrayMapRootIndex);
2343 __ j(equal, &done); 2345 __ j(equal, &done, Label::kNear);
2344 Register temp((result.is(rax)) ? rbx : rax); 2346 Register temp((result.is(rax)) ? rbx : rax);
2345 __ push(temp); 2347 __ push(temp);
2346 __ movq(temp, FieldOperand(result, HeapObject::kMapOffset)); 2348 __ movq(temp, FieldOperand(result, HeapObject::kMapOffset));
2347 __ movzxbq(temp, FieldOperand(temp, Map::kInstanceTypeOffset)); 2349 __ movzxbq(temp, FieldOperand(temp, Map::kInstanceTypeOffset));
2348 __ subq(temp, Immediate(FIRST_EXTERNAL_ARRAY_TYPE)); 2350 __ subq(temp, Immediate(FIRST_EXTERNAL_ARRAY_TYPE));
2349 __ cmpq(temp, Immediate(kExternalArrayTypeCount)); 2351 __ cmpq(temp, Immediate(kExternalArrayTypeCount));
2350 __ pop(temp); 2352 __ pop(temp);
2351 __ Check(below, "Check for fast elements failed."); 2353 __ Check(below, "Check for fast elements failed.");
2352 __ bind(&done); 2354 __ bind(&done);
2353 } 2355 }
(...skipping 99 matching lines...)
2453 2455
2454 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize(); 2456 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2455 CallCode(ic, RelocInfo::CODE_TARGET, instr); 2457 CallCode(ic, RelocInfo::CODE_TARGET, instr);
2456 } 2458 }
2457 2459
2458 2460
2459 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) { 2461 void LCodeGen::DoArgumentsElements(LArgumentsElements* instr) {
2460 Register result = ToRegister(instr->result()); 2462 Register result = ToRegister(instr->result());
2461 2463
2462 // Check for arguments adapter frame. 2464 // Check for arguments adapter frame.
2463 NearLabel done, adapted; 2465 Label done, adapted;
2464 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2466 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2465 __ Cmp(Operand(result, StandardFrameConstants::kContextOffset), 2467 __ Cmp(Operand(result, StandardFrameConstants::kContextOffset),
2466 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 2468 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
2467 __ j(equal, &adapted); 2469 __ j(equal, &adapted, Label::kNear);
2468 2470
2469 // No arguments adaptor frame. 2471 // No arguments adaptor frame.
2470 __ movq(result, rbp); 2472 __ movq(result, rbp);
2471 __ jmp(&done); 2473 __ jmp(&done, Label::kNear);
2472 2474
2473 // Arguments adaptor frame present. 2475 // Arguments adaptor frame present.
2474 __ bind(&adapted); 2476 __ bind(&adapted);
2475 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2477 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2476 2478
2477 // Result is the frame pointer for the frame if not adapted and for the real 2479 // Result is the frame pointer for the frame if not adapted and for the real
2478 // frame below the adaptor frame if adapted. 2480 // frame below the adaptor frame if adapted.
2479 __ bind(&done); 2481 __ bind(&done);
2480 } 2482 }
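The frame walk above returns the current frame pointer unless the caller frame is an arguments adaptor (identified by the sentinel stored in its context slot), in which case the adaptor frame's pointer is returned, since that is where the actual arguments live. A conceptual sketch with an illustrative frame struct:

  #include <cassert>

  // Illustrative frame model; real frames are raw stack slots reached through rbp.
  struct Frame {
    Frame* caller = nullptr;            // StandardFrameConstants::kCallerFPOffset
    bool is_arguments_adaptor = false;  // marker held in the context slot
  };

  // Sketch of DoArgumentsElements: pick the frame that holds the arguments.
  Frame* ArgumentsFrame(Frame* current) {
    if (current->caller != nullptr && current->caller->is_arguments_adaptor) {
      return current->caller;  // adapted call: arguments sit in the adaptor frame
    }
    return current;            // normal call: arguments sit in our own frame
  }

  int main() {
    Frame adaptor; adaptor.is_arguments_adaptor = true;
    Frame inner;   inner.caller = &adaptor;
    assert(ArgumentsFrame(&inner) == &adaptor);
    Frame plain;
    assert(ArgumentsFrame(&plain) == &plain);
    return 0;
  }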
2481 2483
2482 2484
2483 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) { 2485 void LCodeGen::DoArgumentsLength(LArgumentsLength* instr) {
2484 Register result = ToRegister(instr->result()); 2486 Register result = ToRegister(instr->result());
2485 2487
2486 NearLabel done; 2488 Label done;
2487 2489
2488 // If no arguments adaptor frame the number of arguments is fixed. 2490 // If no arguments adaptor frame the number of arguments is fixed.
2489 if (instr->InputAt(0)->IsRegister()) { 2491 if (instr->InputAt(0)->IsRegister()) {
2490 __ cmpq(rbp, ToRegister(instr->InputAt(0))); 2492 __ cmpq(rbp, ToRegister(instr->InputAt(0)));
2491 } else { 2493 } else {
2492 __ cmpq(rbp, ToOperand(instr->InputAt(0))); 2494 __ cmpq(rbp, ToOperand(instr->InputAt(0)));
2493 } 2495 }
2494 __ movl(result, Immediate(scope()->num_parameters())); 2496 __ movl(result, Immediate(scope()->num_parameters()));
2495 __ j(equal, &done); 2497 __ j(equal, &done, Label::kNear);
2496 2498
2497 // Arguments adaptor frame present. Get argument length from there. 2499 // Arguments adaptor frame present. Get argument length from there.
2498 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 2500 __ movq(result, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
2499 __ SmiToInteger32(result, 2501 __ SmiToInteger32(result,
2500 Operand(result, 2502 Operand(result,
2501 ArgumentsAdaptorFrameConstants::kLengthOffset)); 2503 ArgumentsAdaptorFrameConstants::kLengthOffset));
2502 2504
2503 // Argument length is in result register. 2505 // Argument length is in result register.
2504 __ bind(&done); 2506 __ bind(&done);
2505 } 2507 }
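Similarly, the length computation above uses the function's fixed parameter count when no adaptor frame is present, and otherwise reads the dynamic argument count (stored as a Smi) from the adaptor frame's length slot. A short sketch, with the adaptor length modelled as an optional value:

  #include <cassert>

  // Sketch of DoArgumentsLength; a negative adaptor_frame_length stands in
  // for "no arguments adaptor frame below the current frame".
  int ArgumentsLength(int num_parameters, int adaptor_frame_length) {
    if (adaptor_frame_length < 0) return num_parameters;  // fixed parameter count
    return adaptor_frame_length;                          // count read from the adaptor frame
  }

  int main() {
    assert(ArgumentsLength(2, -1) == 2);  // no adaptor frame
    assert(ArgumentsLength(2, 5) == 5);   // adaptor frame with 5 actual arguments
    return 0;
  }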
2506 2508
2507 2509
2508 void LCodeGen::DoApplyArguments(LApplyArguments* instr) { 2510 void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
2509 Register receiver = ToRegister(instr->receiver()); 2511 Register receiver = ToRegister(instr->receiver());
2510 Register function = ToRegister(instr->function()); 2512 Register function = ToRegister(instr->function());
2511 Register length = ToRegister(instr->length()); 2513 Register length = ToRegister(instr->length());
2512 Register elements = ToRegister(instr->elements()); 2514 Register elements = ToRegister(instr->elements());
2513 ASSERT(receiver.is(rax)); // Used for parameter count. 2515 ASSERT(receiver.is(rax)); // Used for parameter count.
2514 ASSERT(function.is(rdi)); // Required by InvokeFunction. 2516 ASSERT(function.is(rdi)); // Required by InvokeFunction.
2515 ASSERT(ToRegister(instr->result()).is(rax)); 2517 ASSERT(ToRegister(instr->result()).is(rax));
2516 2518
2517 // If the receiver is null or undefined, we have to pass the global object 2519 // If the receiver is null or undefined, we have to pass the global object
2518 // as a receiver. 2520 // as a receiver.
2519 NearLabel global_object, receiver_ok; 2521 Label global_object, receiver_ok;
2520 __ CompareRoot(receiver, Heap::kNullValueRootIndex); 2522 __ CompareRoot(receiver, Heap::kNullValueRootIndex);
2521 __ j(equal, &global_object); 2523 __ j(equal, &global_object, Label::kNear);
2522 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex); 2524 __ CompareRoot(receiver, Heap::kUndefinedValueRootIndex);
2523 __ j(equal, &global_object); 2525 __ j(equal, &global_object, Label::kNear);
2524 2526
2525 // The receiver should be a JS object. 2527 // The receiver should be a JS object.
2526 Condition is_smi = __ CheckSmi(receiver); 2528 Condition is_smi = __ CheckSmi(receiver);
2527 DeoptimizeIf(is_smi, instr->environment()); 2529 DeoptimizeIf(is_smi, instr->environment());
2528 __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, kScratchRegister); 2530 __ CmpObjectType(receiver, FIRST_JS_OBJECT_TYPE, kScratchRegister);
2529 DeoptimizeIf(below, instr->environment()); 2531 DeoptimizeIf(below, instr->environment());
2530 __ jmp(&receiver_ok); 2532 __ jmp(&receiver_ok, Label::kNear);
2531 2533
2532 __ bind(&global_object); 2534 __ bind(&global_object);
2533 // TODO(kmillikin): We have a hydrogen value for the global object. See 2535 // TODO(kmillikin): We have a hydrogen value for the global object. See
2534 // if it's better to use it than to explicitly fetch it from the context 2536 // if it's better to use it than to explicitly fetch it from the context
2535 // here. 2537 // here.
2536 __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset)); 2538 __ movq(receiver, Operand(rbp, StandardFrameConstants::kContextOffset));
2537 __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX)); 2539 __ movq(receiver, ContextOperand(receiver, Context::GLOBAL_INDEX));
2538 __ bind(&receiver_ok); 2540 __ bind(&receiver_ok);
2539 2541
2540 // Copy the arguments to this function possibly from the 2542 // Copy the arguments to this function possibly from the
2541 // adaptor frame below it. 2543 // adaptor frame below it.
2542 const uint32_t kArgumentsLimit = 1 * KB; 2544 const uint32_t kArgumentsLimit = 1 * KB;
2543 __ cmpq(length, Immediate(kArgumentsLimit)); 2545 __ cmpq(length, Immediate(kArgumentsLimit));
2544 DeoptimizeIf(above, instr->environment()); 2546 DeoptimizeIf(above, instr->environment());
2545 2547
2546 __ push(receiver); 2548 __ push(receiver);
2547 __ movq(receiver, length); 2549 __ movq(receiver, length);
2548 2550
2549 // Loop through the arguments pushing them onto the execution 2551 // Loop through the arguments pushing them onto the execution
2550 // stack. 2552 // stack.
2551 NearLabel invoke, loop; 2553 Label invoke, loop;
2552 // length is a small non-negative integer, due to the test above. 2554 // length is a small non-negative integer, due to the test above.
2553 __ testl(length, length); 2555 __ testl(length, length);
2554 __ j(zero, &invoke); 2556 __ j(zero, &invoke, Label::kNear);
2555 __ bind(&loop); 2557 __ bind(&loop);
2556 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize)); 2558 __ push(Operand(elements, length, times_pointer_size, 1 * kPointerSize));
2557 __ decl(length); 2559 __ decl(length);
2558 __ j(not_zero, &loop); 2560 __ j(not_zero, &loop);
2559 2561
2560 // Invoke the function. 2562 // Invoke the function.
2561 __ bind(&invoke); 2563 __ bind(&invoke);
2562 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment()); 2564 ASSERT(instr->HasPointerMap() && instr->HasDeoptimizationEnvironment());
2563 LPointerMap* pointers = instr->pointer_map(); 2565 LPointerMap* pointers = instr->pointer_map();
2564 LEnvironment* env = instr->deoptimization_environment(); 2566 LEnvironment* env = instr->deoptimization_environment();
(...skipping 226 matching lines...)
2791 2793
2792 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) { 2794 void LCodeGen::DoMathRound(LUnaryMathOperation* instr) {
2793 const XMMRegister xmm_scratch = xmm0; 2795 const XMMRegister xmm_scratch = xmm0;
2794 Register output_reg = ToRegister(instr->result()); 2796 Register output_reg = ToRegister(instr->result());
2795 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0)); 2797 XMMRegister input_reg = ToDoubleRegister(instr->InputAt(0));
2796 2798
2797 Label done; 2799 Label done;
2798 // xmm_scratch = 0.5 2800 // xmm_scratch = 0.5
2799 __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE); 2801 __ movq(kScratchRegister, V8_INT64_C(0x3FE0000000000000), RelocInfo::NONE);
2800 __ movq(xmm_scratch, kScratchRegister); 2802 __ movq(xmm_scratch, kScratchRegister);
2801 NearLabel below_half; 2803 Label below_half;
2802 __ ucomisd(xmm_scratch, input_reg); 2804 __ ucomisd(xmm_scratch, input_reg);
2803 __ j(above, &below_half); // If input_reg is NaN, this doesn't jump. 2805 // If input_reg is NaN, this doesn't jump.
2806 __ j(above, &below_half, Label::kNear);
2804 // input = input + 0.5 2807 // input = input + 0.5
2805 // This addition might give a result that isn't the correct for 2808 // This addition might give a result that isn't the correct for
2806 // rounding, due to loss of precision, but only for a number that's 2809 // rounding, due to loss of precision, but only for a number that's
2807 // so big that the conversion below will overflow anyway. 2810 // so big that the conversion below will overflow anyway.
2808 __ addsd(input_reg, xmm_scratch); 2811 __ addsd(input_reg, xmm_scratch);
2809 // Compute Math.floor(input). 2812 // Compute Math.floor(input).
2810 // Use truncating instruction (OK because input is positive). 2813 // Use truncating instruction (OK because input is positive).
2811 __ cvttsd2si(output_reg, input_reg); 2814 __ cvttsd2si(output_reg, input_reg);
2812 // Overflow is signalled with minint. 2815 // Overflow is signalled with minint.
2813 __ cmpl(output_reg, Immediate(0x80000000)); 2816 __ cmpl(output_reg, Immediate(0x80000000));
(...skipping 286 matching lines...)
3100 __ cvtsd2ss(value, value); 3103 __ cvtsd2ss(value, value);
3101 __ movss(Operand(external_pointer, key, times_4, 0), value); 3104 __ movss(Operand(external_pointer, key, times_4, 0), value);
3102 } else if (array_type == kExternalDoubleArray) { 3105 } else if (array_type == kExternalDoubleArray) {
3103 __ movsd(Operand(external_pointer, key, times_8, 0), 3106 __ movsd(Operand(external_pointer, key, times_8, 0),
3104 ToDoubleRegister(instr->value())); 3107 ToDoubleRegister(instr->value()));
3105 } else { 3108 } else {
3106 Register value(ToRegister(instr->value())); 3109 Register value(ToRegister(instr->value()));
3107 switch (array_type) { 3110 switch (array_type) {
3108 case kExternalPixelArray: 3111 case kExternalPixelArray:
3109 { // Clamp the value to [0..255]. 3112 { // Clamp the value to [0..255].
3110 NearLabel done; 3113 Label done;
3111 __ testl(value, Immediate(0xFFFFFF00)); 3114 __ testl(value, Immediate(0xFFFFFF00));
3112 __ j(zero, &done); 3115 __ j(zero, &done, Label::kNear);
3113 __ setcc(negative, value); // 1 if negative, 0 if positive. 3116 __ setcc(negative, value); // 1 if negative, 0 if positive.
3114 __ decb(value); // 0 if negative, 255 if positive. 3117 __ decb(value); // 0 if negative, 255 if positive.
3115 __ bind(&done); 3118 __ bind(&done);
3116 __ movb(Operand(external_pointer, key, times_1, 0), value); 3119 __ movb(Operand(external_pointer, key, times_1, 0), value);
3117 } 3120 }
3118 break; 3121 break;
3119 case kExternalByteArray: 3122 case kExternalByteArray:
3120 case kExternalUnsignedByteArray: 3123 case kExternalUnsignedByteArray:
3121 __ movb(Operand(external_pointer, key, times_1, 0), value); 3124 __ movb(Operand(external_pointer, key, times_1, 0), value);
3122 break; 3125 break;
(...skipping 102 matching lines...)
3225 return; 3228 return;
3226 } 3229 }
3227 } else { 3230 } else {
3228 index = ToRegister(instr->index()); 3231 index = ToRegister(instr->index());
3229 } 3232 }
3230 Register result = ToRegister(instr->result()); 3233 Register result = ToRegister(instr->result());
3231 3234
3232 DeferredStringCharCodeAt* deferred = 3235 DeferredStringCharCodeAt* deferred =
3233 new DeferredStringCharCodeAt(this, instr); 3236 new DeferredStringCharCodeAt(this, instr);
3234 3237
3235 NearLabel flat_string, ascii_string, done; 3238 Label flat_string, ascii_string, done;
3236 3239
3237 // Fetch the instance type of the receiver into result register. 3240 // Fetch the instance type of the receiver into result register.
3238 __ movq(result, FieldOperand(string, HeapObject::kMapOffset)); 3241 __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
3239 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset)); 3242 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
3240 3243
3241 // We need special handling for non-sequential strings. 3244 // We need special handling for non-sequential strings.
3242 STATIC_ASSERT(kSeqStringTag == 0); 3245 STATIC_ASSERT(kSeqStringTag == 0);
3243 __ testb(result, Immediate(kStringRepresentationMask)); 3246 __ testb(result, Immediate(kStringRepresentationMask));
3244 __ j(zero, &flat_string); 3247 __ j(zero, &flat_string, Label::kNear);
3245 3248
3246 // Handle cons strings and go to deferred code for the rest. 3249 // Handle cons strings and go to deferred code for the rest.
3247 __ testb(result, Immediate(kIsConsStringMask)); 3250 __ testb(result, Immediate(kIsConsStringMask));
3248 __ j(zero, deferred->entry()); 3251 __ j(zero, deferred->entry());
3249 3252
3250 // ConsString. 3253 // ConsString.
3251 // Check whether the right hand side is the empty string (i.e. if 3254 // Check whether the right hand side is the empty string (i.e. if
3252 // this is really a flat string in a cons string). If that is not 3255 // this is really a flat string in a cons string). If that is not
3253 // the case we would rather go to the runtime system now to flatten 3256 // the case we would rather go to the runtime system now to flatten
3254 // the string. 3257 // the string.
3255 __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset), 3258 __ CompareRoot(FieldOperand(string, ConsString::kSecondOffset),
3256 Heap::kEmptyStringRootIndex); 3259 Heap::kEmptyStringRootIndex);
3257 __ j(not_equal, deferred->entry()); 3260 __ j(not_equal, deferred->entry());
3258 // Get the first of the two strings and load its instance type. 3261 // Get the first of the two strings and load its instance type.
3259 __ movq(string, FieldOperand(string, ConsString::kFirstOffset)); 3262 __ movq(string, FieldOperand(string, ConsString::kFirstOffset));
3260 __ movq(result, FieldOperand(string, HeapObject::kMapOffset)); 3263 __ movq(result, FieldOperand(string, HeapObject::kMapOffset));
3261 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset)); 3264 __ movzxbl(result, FieldOperand(result, Map::kInstanceTypeOffset));
3262 // If the first cons component is also non-flat, then go to runtime. 3265 // If the first cons component is also non-flat, then go to runtime.
3263 STATIC_ASSERT(kSeqStringTag == 0); 3266 STATIC_ASSERT(kSeqStringTag == 0);
3264 __ testb(result, Immediate(kStringRepresentationMask)); 3267 __ testb(result, Immediate(kStringRepresentationMask));
3265 __ j(not_zero, deferred->entry()); 3268 __ j(not_zero, deferred->entry());
3266 3269
3267 // Check for ASCII or two-byte string. 3270 // Check for ASCII or two-byte string.
3268 __ bind(&flat_string); 3271 __ bind(&flat_string);
3269 STATIC_ASSERT(kAsciiStringTag != 0); 3272 STATIC_ASSERT(kAsciiStringTag != 0);
3270 __ testb(result, Immediate(kStringEncodingMask)); 3273 __ testb(result, Immediate(kStringEncodingMask));
3271 __ j(not_zero, &ascii_string); 3274 __ j(not_zero, &ascii_string, Label::kNear);
3272 3275
3273 // Two-byte string. 3276 // Two-byte string.
3274 // Load the two-byte character code into the result register. 3277 // Load the two-byte character code into the result register.
3275 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1); 3278 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3276 if (instr->index()->IsConstantOperand()) { 3279 if (instr->index()->IsConstantOperand()) {
3277 __ movzxwl(result, 3280 __ movzxwl(result,
3278 FieldOperand(string, 3281 FieldOperand(string,
3279 SeqTwoByteString::kHeaderSize + 3282 SeqTwoByteString::kHeaderSize +
3280 (kUC16Size * const_index))); 3283 (kUC16Size * const_index)));
3281 } else { 3284 } else {
3282 __ movzxwl(result, FieldOperand(string, 3285 __ movzxwl(result, FieldOperand(string,
3283 index, 3286 index,
3284 times_2, 3287 times_2,
3285 SeqTwoByteString::kHeaderSize)); 3288 SeqTwoByteString::kHeaderSize));
3286 } 3289 }
3287 __ jmp(&done); 3290 __ jmp(&done, Label::kNear);
3288 3291
3289 // ASCII string. 3292 // ASCII string.
3290 // Load the byte into the result register. 3293 // Load the byte into the result register.
3291 __ bind(&ascii_string); 3294 __ bind(&ascii_string);
3292 if (instr->index()->IsConstantOperand()) { 3295 if (instr->index()->IsConstantOperand()) {
3293 __ movzxbl(result, FieldOperand(string, 3296 __ movzxbl(result, FieldOperand(string,
3294 SeqAsciiString::kHeaderSize + const_index)); 3297 SeqAsciiString::kHeaderSize + const_index));
3295 } else { 3298 } else {
3296 __ movzxbl(result, FieldOperand(string, 3299 __ movzxbl(result, FieldOperand(string,
3297 index, 3300 index,
(...skipping 169 matching lines...)
3467 Condition is_smi = __ CheckSmi(input); 3470 Condition is_smi = __ CheckSmi(input);
3468 DeoptimizeIf(NegateCondition(is_smi), instr->environment()); 3471 DeoptimizeIf(NegateCondition(is_smi), instr->environment());
3469 } 3472 }
3470 __ SmiToInteger32(input, input); 3473 __ SmiToInteger32(input, input);
3471 } 3474 }
3472 3475
3473 3476
3474 void LCodeGen::EmitNumberUntagD(Register input_reg, 3477 void LCodeGen::EmitNumberUntagD(Register input_reg,
3475 XMMRegister result_reg, 3478 XMMRegister result_reg,
3476 LEnvironment* env) { 3479 LEnvironment* env) {
3477 NearLabel load_smi, heap_number, done; 3480 NearLabel load_smi;
3481 Label heap_number, done;
3478 3482
3479 // Smi check. 3483 // Smi check.
3480 __ JumpIfSmi(input_reg, &load_smi); 3484 __ JumpIfSmi(input_reg, &load_smi);
3481 3485
3482 // Heap number map check. 3486 // Heap number map check.
3483 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), 3487 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
3484 Heap::kHeapNumberMapRootIndex); 3488 Heap::kHeapNumberMapRootIndex);
3485 __ j(equal, &heap_number); 3489 __ j(equal, &heap_number, Label::kNear);
3486 3490
3487 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex); 3491 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
3488 DeoptimizeIf(not_equal, env); 3492 DeoptimizeIf(not_equal, env);
3489 3493
3490 // Convert undefined to NaN. Compute NaN as 0/0. 3494 // Convert undefined to NaN. Compute NaN as 0/0.
3491 __ xorps(result_reg, result_reg); 3495 __ xorps(result_reg, result_reg);
3492 __ divsd(result_reg, result_reg); 3496 __ divsd(result_reg, result_reg);
3493 __ jmp(&done); 3497 __ jmp(&done, Label::kNear);
3494 3498
3495 // Heap number to XMM conversion. 3499 // Heap number to XMM conversion.
3496 __ bind(&heap_number); 3500 __ bind(&heap_number);
3497 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset)); 3501 __ movsd(result_reg, FieldOperand(input_reg, HeapNumber::kValueOffset));
3498 __ jmp(&done); 3502 __ jmp(&done, Label::kNear);
3499 3503
3500 // Smi to XMM conversion 3504 // Smi to XMM conversion
3501 __ bind(&load_smi); 3505 __ bind(&load_smi);
3502 __ SmiToInteger32(kScratchRegister, input_reg); 3506 __ SmiToInteger32(kScratchRegister, input_reg);
3503 __ cvtlsi2sd(result_reg, kScratchRegister); 3507 __ cvtlsi2sd(result_reg, kScratchRegister);
3504 __ bind(&done); 3508 __ bind(&done);
3505 } 3509 }
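Side note, not part of the change: the undefined case above materializes NaN without loading a constant by computing 0/0 (xorps zeroes the register, divsd divides it by itself). A trivial C++ illustration:
  double UndefinedToNaN() {
    double zero = 0.0;
    return zero / zero;  // IEEE 754: 0/0 yields a quiet NaN
  }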
3506 3510
3507 3511
3508 class DeferredTaggedToI: public LDeferredCode { 3512 class DeferredTaggedToI: public LDeferredCode {
3509 public: 3513 public:
3510 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr) 3514 DeferredTaggedToI(LCodeGen* codegen, LTaggedToI* instr)
3511 : LDeferredCode(codegen), instr_(instr) { } 3515 : LDeferredCode(codegen), instr_(instr) { }
3512 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); } 3516 virtual void Generate() { codegen()->DoDeferredTaggedToI(instr_); }
3513 private: 3517 private:
3514 LTaggedToI* instr_; 3518 LTaggedToI* instr_;
3515 }; 3519 };
3516 3520
3517 3521
3518 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) { 3522 void LCodeGen::DoDeferredTaggedToI(LTaggedToI* instr) {
3519 NearLabel done, heap_number; 3523 Label done, heap_number;
3520 Register input_reg = ToRegister(instr->InputAt(0)); 3524 Register input_reg = ToRegister(instr->InputAt(0));
3521 3525
3522 // Heap number map check. 3526 // Heap number map check.
3523 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset), 3527 __ CompareRoot(FieldOperand(input_reg, HeapObject::kMapOffset),
3524 Heap::kHeapNumberMapRootIndex); 3528 Heap::kHeapNumberMapRootIndex);
3525 3529
3526 if (instr->truncating()) { 3530 if (instr->truncating()) {
3527 __ j(equal, &heap_number); 3531 __ j(equal, &heap_number, Label::kNear);
3528 // Check for undefined. Undefined is converted to zero for truncating 3532 // Check for undefined. Undefined is converted to zero for truncating
3529 // conversions. 3533 // conversions.
3530 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex); 3534 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
3531 DeoptimizeIf(not_equal, instr->environment()); 3535 DeoptimizeIf(not_equal, instr->environment());
3532 __ Set(input_reg, 0); 3536 __ Set(input_reg, 0);
3533 __ jmp(&done); 3537 __ jmp(&done, Label::kNear);
3534 3538
3535 __ bind(&heap_number); 3539 __ bind(&heap_number);
3536 3540
3537 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset)); 3541 __ movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
3538 __ cvttsd2siq(input_reg, xmm0); 3542 __ cvttsd2siq(input_reg, xmm0);
3539 __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000)); 3543 __ Set(kScratchRegister, V8_UINT64_C(0x8000000000000000));
3540 __ cmpq(input_reg, kScratchRegister); 3544 __ cmpq(input_reg, kScratchRegister);
3541 DeoptimizeIf(equal, instr->environment()); 3545 DeoptimizeIf(equal, instr->environment());
3542 } else { 3546 } else {
3543 // Deoptimize if we don't have a heap number. 3547 // Deoptimize if we don't have a heap number.
(...skipping 60 matching lines...)
3604 __ movq(kScratchRegister, V8_INT64_C(0x8000000000000000), RelocInfo::NONE); 3608 __ movq(kScratchRegister, V8_INT64_C(0x8000000000000000), RelocInfo::NONE);
3605 __ cmpq(result_reg, kScratchRegister); 3609 __ cmpq(result_reg, kScratchRegister);
3606 DeoptimizeIf(equal, instr->environment()); 3610 DeoptimizeIf(equal, instr->environment());
3607 } else { 3611 } else {
3608 __ cvttsd2si(result_reg, input_reg); 3612 __ cvttsd2si(result_reg, input_reg);
3609 __ cvtlsi2sd(xmm0, result_reg); 3613 __ cvtlsi2sd(xmm0, result_reg);
3610 __ ucomisd(xmm0, input_reg); 3614 __ ucomisd(xmm0, input_reg);
3611 DeoptimizeIf(not_equal, instr->environment()); 3615 DeoptimizeIf(not_equal, instr->environment());
3612 DeoptimizeIf(parity_even, instr->environment()); // NaN. 3616 DeoptimizeIf(parity_even, instr->environment()); // NaN.
3613 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { 3617 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
3614 NearLabel done; 3618 Label done;
3615 // The integer converted back is equal to the original. We 3619 // The integer converted back is equal to the original. We
3616 // only have to test if we got -0 as an input. 3620 // only have to test if we got -0 as an input.
3617 __ testl(result_reg, result_reg); 3621 __ testl(result_reg, result_reg);
3618 __ j(not_zero, &done); 3622 __ j(not_zero, &done, Label::kNear);
3619 __ movmskpd(result_reg, input_reg); 3623 __ movmskpd(result_reg, input_reg);
3620 // Bit 0 contains the sign of the double in input_reg. 3624 // Bit 0 contains the sign of the double in input_reg.
3621 // If input was positive, we are ok and return 0, otherwise 3625 // If input was positive, we are ok and return 0, otherwise
3622 // deoptimize. 3626 // deoptimize.
3623 __ andl(result_reg, Immediate(1)); 3627 __ andl(result_reg, Immediate(1));
3624 DeoptimizeIf(not_zero, instr->environment()); 3628 DeoptimizeIf(not_zero, instr->environment());
3625 __ bind(&done); 3629 __ bind(&done);
3626 } 3630 }
3627 } 3631 }
3628 } 3632 }
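For context only (names are illustrative, not V8 API): the non-truncating branch above converts, converts back, and compares to guarantee exactness, treats NaN as failure, and, under kBailoutOnMinusZero, rejects -0.0 by inspecting the sign bit that movmskpd extracts. A behavioral C++ sketch:
  #include <cstdint>
  #include <cstring>

  bool DoubleToInt32Exact(double input, bool bailout_on_minus_zero,
                          int32_t* result) {
    // Out-of-range and NaN inputs fail, mirroring the deopt paths.
    if (!(input >= -2147483648.0 && input <= 2147483647.0)) return false;
    int32_t truncated = static_cast<int32_t>(input);             // cvttsd2si
    if (static_cast<double>(truncated) != input) return false;   // cvtlsi2sd + ucomisd
    if (bailout_on_minus_zero && truncated == 0) {
      uint64_t bits;
      std::memcpy(&bits, &input, sizeof(bits));                  // what movmskpd inspects
      if (bits >> 63) return false;                              // input was -0.0
    }
    *result = truncated;
    return true;
  }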
(...skipping 147 matching lines...)
3776 3780
3777 3781
3778 void LCodeGen::DoToFastProperties(LToFastProperties* instr) { 3782 void LCodeGen::DoToFastProperties(LToFastProperties* instr) {
3779 ASSERT(ToRegister(instr->InputAt(0)).is(rax)); 3783 ASSERT(ToRegister(instr->InputAt(0)).is(rax));
3780 __ push(rax); 3784 __ push(rax);
3781 CallRuntime(Runtime::kToFastProperties, 1, instr); 3785 CallRuntime(Runtime::kToFastProperties, 1, instr);
3782 } 3786 }
3783 3787
3784 3788
3785 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) { 3789 void LCodeGen::DoRegExpLiteral(LRegExpLiteral* instr) {
3786 NearLabel materialized; 3790 Label materialized;
3787 // Registers will be used as follows: 3791 // Registers will be used as follows:
3788 // rdi = JS function. 3792 // rdi = JS function.
3789 // rcx = literals array. 3793 // rcx = literals array.
3790 // rbx = regexp literal. 3794 // rbx = regexp literal.
3791 // rax = regexp literal clone. 3795 // rax = regexp literal clone.
3792 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset)); 3796 __ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
3793 __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset)); 3797 __ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
3794 int literal_offset = FixedArray::kHeaderSize + 3798 int literal_offset = FixedArray::kHeaderSize +
3795 instr->hydrogen()->literal_index() * kPointerSize; 3799 instr->hydrogen()->literal_index() * kPointerSize;
3796 __ movq(rbx, FieldOperand(rcx, literal_offset)); 3800 __ movq(rbx, FieldOperand(rcx, literal_offset));
3797 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex); 3801 __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
3798 __ j(not_equal, &materialized); 3802 __ j(not_equal, &materialized, Label::kNear);
3799 3803
3800 // Create regexp literal using runtime function 3804 // Create regexp literal using runtime function
3801 // Result will be in rax. 3805 // Result will be in rax.
3802 __ push(rcx); 3806 __ push(rcx);
3803 __ Push(Smi::FromInt(instr->hydrogen()->literal_index())); 3807 __ Push(Smi::FromInt(instr->hydrogen()->literal_index()));
3804 __ Push(instr->hydrogen()->pattern()); 3808 __ Push(instr->hydrogen()->pattern());
3805 __ Push(instr->hydrogen()->flags()); 3809 __ Push(instr->hydrogen()->flags());
3806 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr); 3810 CallRuntime(Runtime::kMaterializeRegExpLiteral, 4, instr);
3807 __ movq(rbx, rax); 3811 __ movq(rbx, rax);
3808 3812
(...skipping 51 matching lines...)
3860 EmitPushTaggedOperand(input); 3864 EmitPushTaggedOperand(input);
3861 CallRuntime(Runtime::kTypeof, 1, instr); 3865 CallRuntime(Runtime::kTypeof, 1, instr);
3862 } 3866 }
3863 3867
3864 3868
3865 void LCodeGen::DoTypeofIs(LTypeofIs* instr) { 3869 void LCodeGen::DoTypeofIs(LTypeofIs* instr) {
3866 Register input = ToRegister(instr->InputAt(0)); 3870 Register input = ToRegister(instr->InputAt(0));
3867 Register result = ToRegister(instr->result()); 3871 Register result = ToRegister(instr->result());
3868 Label true_label; 3872 Label true_label;
3869 Label false_label; 3873 Label false_label;
3870 NearLabel done; 3874 Label done;
3871 3875
3872 Condition final_branch_condition = EmitTypeofIs(&true_label, 3876 Condition final_branch_condition = EmitTypeofIs(&true_label,
3873 &false_label, 3877 &false_label,
3874 input, 3878 input,
3875 instr->type_literal()); 3879 instr->type_literal());
3876 __ j(final_branch_condition, &true_label); 3880 __ j(final_branch_condition, &true_label);
3877 __ bind(&false_label); 3881 __ bind(&false_label);
3878 __ LoadRoot(result, Heap::kFalseValueRootIndex); 3882 __ LoadRoot(result, Heap::kFalseValueRootIndex);
3879 __ jmp(&done); 3883 __ jmp(&done, Label::kNear);
3880 3884
3881 __ bind(&true_label); 3885 __ bind(&true_label);
3882 __ LoadRoot(result, Heap::kTrueValueRootIndex); 3886 __ LoadRoot(result, Heap::kTrueValueRootIndex);
3883 3887
3884 __ bind(&done); 3888 __ bind(&done);
3885 } 3889 }
3886 3890
3887 3891
3888 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { 3892 void LCodeGen::EmitPushTaggedOperand(LOperand* operand) {
3889 ASSERT(!operand->IsDoubleRegister()); 3893 ASSERT(!operand->IsDoubleRegister());
(...skipping 81 matching lines...)
3971 final_branch_condition = never; 3975 final_branch_condition = never;
3972 __ jmp(false_label); 3976 __ jmp(false_label);
3973 } 3977 }
3974 3978
3975 return final_branch_condition; 3979 return final_branch_condition;
3976 } 3980 }
3977 3981
3978 3982
3979 void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) { 3983 void LCodeGen::DoIsConstructCall(LIsConstructCall* instr) {
3980 Register result = ToRegister(instr->result()); 3984 Register result = ToRegister(instr->result());
3981 NearLabel true_label; 3985 Label true_label;
3982 NearLabel false_label; 3986 Label done;
3983 NearLabel done;
3984 3987
3985 EmitIsConstructCall(result); 3988 EmitIsConstructCall(result);
3986 __ j(equal, &true_label); 3989 __ j(equal, &true_label, Label::kNear);
3987 3990
3988 __ LoadRoot(result, Heap::kFalseValueRootIndex); 3991 __ LoadRoot(result, Heap::kFalseValueRootIndex);
3989 __ jmp(&done); 3992 __ jmp(&done, Label::kNear);
3990 3993
3991 __ bind(&true_label); 3994 __ bind(&true_label);
3992 __ LoadRoot(result, Heap::kTrueValueRootIndex); 3995 __ LoadRoot(result, Heap::kTrueValueRootIndex);
3993 3996
3994 3997
3995 __ bind(&done); 3998 __ bind(&done);
3996 } 3999 }
3997 4000
3998 4001
3999 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { 4002 void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) {
4000 Register temp = ToRegister(instr->TempAt(0)); 4003 Register temp = ToRegister(instr->TempAt(0));
4001 int true_block = chunk_->LookupDestination(instr->true_block_id()); 4004 int true_block = chunk_->LookupDestination(instr->true_block_id());
4002 int false_block = chunk_->LookupDestination(instr->false_block_id()); 4005 int false_block = chunk_->LookupDestination(instr->false_block_id());
4003 4006
4004 EmitIsConstructCall(temp); 4007 EmitIsConstructCall(temp);
4005 EmitBranch(true_block, false_block, equal); 4008 EmitBranch(true_block, false_block, equal);
4006 } 4009 }
4007 4010
4008 4011
4009 void LCodeGen::EmitIsConstructCall(Register temp) { 4012 void LCodeGen::EmitIsConstructCall(Register temp) {
4010 // Get the frame pointer for the calling frame. 4013 // Get the frame pointer for the calling frame.
4011 __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset)); 4014 __ movq(temp, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
4012 4015
4013 // Skip the arguments adaptor frame if it exists. 4016 // Skip the arguments adaptor frame if it exists.
4014 NearLabel check_frame_marker; 4017 Label check_frame_marker;
4015 __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset), 4018 __ Cmp(Operand(temp, StandardFrameConstants::kContextOffset),
4016 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)); 4019 Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
4017 __ j(not_equal, &check_frame_marker); 4020 __ j(not_equal, &check_frame_marker, Label::kNear);
4018 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset)); 4021 __ movq(temp, Operand(rax, StandardFrameConstants::kCallerFPOffset));
4019 4022
4020 // Check the marker in the calling frame. 4023 // Check the marker in the calling frame.
4021 __ bind(&check_frame_marker); 4024 __ bind(&check_frame_marker);
4022 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset), 4025 __ Cmp(Operand(temp, StandardFrameConstants::kMarkerOffset),
4023 Smi::FromInt(StackFrame::CONSTRUCT)); 4026 Smi::FromInt(StackFrame::CONSTRUCT));
4024 } 4027 }
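Purely illustrative (every name below is hypothetical, not V8's): EmitIsConstructCall walks to the caller's frame, skips a single arguments-adaptor frame if present, and then compares the frame-marker slot against the CONSTRUCT marker. In straight C++ the shape is:
  #include <cstdint>

  struct FrameSketch {
    const FrameSketch* caller;  // kCallerFPOffset
    intptr_t context;           // kContextOffset (holds a marker for adaptor frames)
    intptr_t marker;            // kMarkerOffset
  };

  constexpr intptr_t kArgumentsAdaptorMarker = 1;  // stand-ins for the Smi markers
  constexpr intptr_t kConstructMarker = 2;

  bool IsConstructCall(const FrameSketch* frame) {
    const FrameSketch* caller = frame->caller;
    if (caller->context == kArgumentsAdaptorMarker) {
      caller = caller->caller;  // skip the adaptor frame
    }
    return caller->marker == kConstructMarker;
  }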
4025 4028
4026 4029
4027 void LCodeGen::DoLazyBailout(LLazyBailout* instr) { 4030 void LCodeGen::DoLazyBailout(LLazyBailout* instr) {
(...skipping 43 matching lines...)
4071 // builtin) 4074 // builtin)
4072 SafepointGenerator safepoint_generator(this, 4075 SafepointGenerator safepoint_generator(this,
4073 pointers, 4076 pointers,
4074 env->deoptimization_index()); 4077 env->deoptimization_index());
4075 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator); 4078 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION, safepoint_generator);
4076 } 4079 }
4077 4080
4078 4081
4079 void LCodeGen::DoStackCheck(LStackCheck* instr) { 4082 void LCodeGen::DoStackCheck(LStackCheck* instr) {
4080 // Perform stack overflow check. 4083 // Perform stack overflow check.
4081 NearLabel done; 4084 Label done;
4082 __ CompareRoot(rsp, Heap::kStackLimitRootIndex); 4085 __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
4083 __ j(above_equal, &done); 4086 __ j(above_equal, &done, Label::kNear);
4084 4087
4085 StackCheckStub stub; 4088 StackCheckStub stub;
4086 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); 4089 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
4087 __ bind(&done); 4090 __ bind(&done);
4088 } 4091 }
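Also for orientation only (hypothetical names): DoStackCheck emits a fast path that compares the stack pointer against the VM stack limit and calls the stack-check stub only when the limit has been crossed:
  #include <cstdint>

  void StackCheck(uintptr_t stack_pointer, uintptr_t stack_limit,
                  void (*call_stack_check_stub)()) {
    if (stack_pointer >= stack_limit) return;  // common case: plenty of stack left
    call_stack_check_stub();                   // slow path: overflow / interrupt handling
  }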
4089 4092
4090 4093
4091 void LCodeGen::DoOsrEntry(LOsrEntry* instr) { 4094 void LCodeGen::DoOsrEntry(LOsrEntry* instr) {
4092 // This is a pseudo-instruction that ensures that the environment here is 4095 // This is a pseudo-instruction that ensures that the environment here is
4093 // properly registered for deoptimization and records the assembler's PC 4096 // properly registered for deoptimization and records the assembler's PC
4094 // offset. 4097 // offset.
4095 LEnvironment* environment = instr->environment(); 4098 LEnvironment* environment = instr->environment();
4096 environment->SetSpilledRegisters(instr->SpilledRegisterArray(), 4099 environment->SetSpilledRegisters(instr->SpilledRegisterArray(),
4097 instr->SpilledDoubleRegisterArray()); 4100 instr->SpilledDoubleRegisterArray());
4098 4101
4099 // If the environment were already registered, we would have no way of 4102 // If the environment were already registered, we would have no way of
4100 // backpatching it with the spill slot operands. 4103 // backpatching it with the spill slot operands.
4101 ASSERT(!environment->HasBeenRegistered()); 4104 ASSERT(!environment->HasBeenRegistered());
4102 RegisterEnvironmentForDeoptimization(environment); 4105 RegisterEnvironmentForDeoptimization(environment);
4103 ASSERT(osr_pc_offset_ == -1); 4106 ASSERT(osr_pc_offset_ == -1);
4104 osr_pc_offset_ = masm()->pc_offset(); 4107 osr_pc_offset_ = masm()->pc_offset();
4105 } 4108 }
4106 4109
4107 #undef __ 4110 #undef __
4108 4111
4109 } } // namespace v8::internal 4112 } } // namespace v8::internal
4110 4113
4111 #endif // V8_TARGET_ARCH_X64 4114 #endif // V8_TARGET_ARCH_X64