OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "lib/error.h" | 10 #include "lib/error.h" |
(...skipping 369 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
380 if (!compiler->is_optimizing()) { | 380 if (!compiler->is_optimizing()) { |
381 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, | 381 compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, |
382 deopt_id, | 382 deopt_id, |
383 token_pos); | 383 token_pos); |
384 } | 384 } |
385 const int kNumberOfArguments = 2; | 385 const int kNumberOfArguments = 2; |
386 const Array& kNoArgumentNames = Array::Handle(); | 386 const Array& kNoArgumentNames = Array::Handle(); |
387 const int kNumArgumentsChecked = 2; | 387 const int kNumArgumentsChecked = 2; |
388 | 388 |
389 __ TraceSimMsg("EmitEqualityAsInstanceCall"); | 389 __ TraceSimMsg("EmitEqualityAsInstanceCall"); |
| 390 __ Comment("EmitEqualityAsInstanceCall"); |
390 Label check_identity; | 391 Label check_identity; |
391 __ lw(A1, Address(SP, 1 * kWordSize)); | 392 __ lw(A1, Address(SP, 1 * kWordSize)); |
392 __ lw(A0, Address(SP, 0 * kWordSize)); | 393 __ lw(A0, Address(SP, 0 * kWordSize)); |
393 __ beq(A1, NULLREG, &check_identity); | 394 __ LoadImmediate(TMP, reinterpret_cast<int32_t>(Object::null())); |
394 __ beq(A0, NULLREG, &check_identity); | 395 __ beq(A1, TMP, &check_identity); |
| 396 __ beq(A0, TMP, &check_identity); |
395 | 397 |
396 ICData& equality_ic_data = ICData::ZoneHandle(); | 398 ICData& equality_ic_data = ICData::ZoneHandle(); |
397 if (compiler->is_optimizing() && FLAG_propagate_ic_data) { | 399 if (compiler->is_optimizing() && FLAG_propagate_ic_data) { |
398 ASSERT(!original_ic_data.IsNull()); | 400 ASSERT(!original_ic_data.IsNull()); |
399 if (original_ic_data.NumberOfChecks() == 0) { | 401 if (original_ic_data.NumberOfChecks() == 0) { |
400 // IC call for reoptimization populates original ICData. | 402 // IC call for reoptimization populates original ICData. |
401 equality_ic_data = original_ic_data.raw(); | 403 equality_ic_data = original_ic_data.raw(); |
402 } else { | 404 } else { |
403 // Megamorphic call. | 405 // Megamorphic call. |
404 equality_ic_data = original_ic_data.AsUnaryClassChecks(); | 406 equality_ic_data = original_ic_data.AsUnaryClassChecks(); |
(...skipping 124 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
529 ASSERT(ic_data.NumberOfChecks() > 0); | 531 ASSERT(ic_data.NumberOfChecks() > 0); |
530 ASSERT(ic_data.num_args_tested() == 1); | 532 ASSERT(ic_data.num_args_tested() == 1); |
531 Label* deopt = compiler->AddDeoptStub(deopt_id, kDeoptEquality); | 533 Label* deopt = compiler->AddDeoptStub(deopt_id, kDeoptEquality); |
532 Register left = locs->in(0).reg(); | 534 Register left = locs->in(0).reg(); |
533 Register right = locs->in(1).reg(); | 535 Register right = locs->in(1).reg(); |
534 ASSERT(left == A1); | 536 ASSERT(left == A1); |
535 ASSERT(right == A0); | 537 ASSERT(right == A0); |
536 Register temp = locs->temp(0).reg(); | 538 Register temp = locs->temp(0).reg(); |
537 | 539 |
538 __ TraceSimMsg("EmitEqualityAsPolymorphicCall"); | 540 __ TraceSimMsg("EmitEqualityAsPolymorphicCall"); |
| 541 __ Comment("EmitEqualityAsPolymorphicCall"); |
539 | 542 |
540 LoadValueCid(compiler, temp, left, | 543 LoadValueCid(compiler, temp, left, |
541 (ic_data.GetReceiverClassIdAt(0) == kSmiCid) ? NULL : deopt); | 544 (ic_data.GetReceiverClassIdAt(0) == kSmiCid) ? NULL : deopt); |
542 // 'temp' contains class-id of the left argument. | 545 // 'temp' contains class-id of the left argument. |
543 ObjectStore* object_store = Isolate::Current()->object_store(); | 546 ObjectStore* object_store = Isolate::Current()->object_store(); |
544 Condition cond = TokenKindToSmiCondition(kind); | 547 Condition cond = TokenKindToSmiCondition(kind); |
545 Label done; | 548 Label done; |
546 const intptr_t len = ic_data.NumberOfChecks(); | 549 const intptr_t len = ic_data.NumberOfChecks(); |
547 for (intptr_t i = 0; i < len; i++) { | 550 for (intptr_t i = 0; i < len; i++) { |
548 // Assert that the Smi is at position 0, if at all. | 551 // Assert that the Smi is at position 0, if at all. |
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
601 __ b(&done); | 604 __ b(&done); |
602 __ Bind(&next_test); | 605 __ Bind(&next_test); |
603 } | 606 } |
604 } | 607 } |
605 __ Bind(&done); | 608 __ Bind(&done); |
606 } | 609 } |
607 | 610 |
608 | 611 |
609 // Emit code when ICData's targets are all Object == (which is ===). | 612 // Emit code when ICData's targets are all Object == (which is ===). |
610 static void EmitCheckedStrictEqual(FlowGraphCompiler* compiler, | 613 static void EmitCheckedStrictEqual(FlowGraphCompiler* compiler, |
611 const ICData& ic_data, | 614 const ICData& orig_ic_data, |
612 const LocationSummary& locs, | 615 const LocationSummary& locs, |
613 Token::Kind kind, | 616 Token::Kind kind, |
614 BranchInstr* branch, | 617 BranchInstr* branch, |
615 intptr_t deopt_id) { | 618 intptr_t deopt_id) { |
616 UNIMPLEMENTED(); | 619 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); |
| 620 Register left = locs.in(0).reg(); |
| 621 Register right = locs.in(1).reg(); |
| 622 Register temp = locs.temp(0).reg(); |
| 623 Label* deopt = compiler->AddDeoptStub(deopt_id, kDeoptEquality); |
| 624 |
| 625 __ Comment("CheckedStrictEqual"); |
| 626 |
| 627 __ andi(CMPRES, left, Immediate(kSmiTagMask)); |
| 628 __ beq(CMPRES, ZR, deopt); |
| 629 // 'left' is not Smi. |
| 630 Label identity_compare; |
| 631 __ LoadImmediate(TMP, reinterpret_cast<int32_t>(Object::null())); |
| 632 __ beq(right, TMP, &identity_compare); |
| 633 __ beq(left, TMP, &identity_compare); |
| 634 |
| 635 __ LoadClassId(temp, left); |
| 636 const ICData& ic_data = ICData::Handle(orig_ic_data.AsUnaryClassChecks()); |
| 637 const intptr_t len = ic_data.NumberOfChecks(); |
| 638 for (intptr_t i = 0; i < len; i++) { |
| 639 if (i == (len - 1)) { |
| 640 __ BranchNotEqual(temp, ic_data.GetReceiverClassIdAt(i), deopt); |
| 641 } else { |
| 642 __ BranchEqual(temp, ic_data.GetReceiverClassIdAt(i), &identity_compare); |
| 643 } |
| 644 } |
| 645 __ Bind(&identity_compare); |
| 646 __ subu(CMPRES, left, right); |
| 647 if (branch == NULL) { |
| 648 Label done, is_equal; |
| 649 Register result = locs.out().reg(); |
| 650 __ beq(CMPRES, ZR, &is_equal); |
| 651 // Not equal. |
| 652 __ LoadObject(result, |
| 653 (kind == Token::kEQ) ? Bool::False() : Bool::True()); |
| 654 __ b(&done); |
| 655 __ Bind(&is_equal); |
| 656 __ LoadObject(result, |
| 657 (kind == Token::kEQ) ? Bool::True() : Bool::False()); |
| 658 __ Bind(&done); |
| 659 |
| 660 } else { |
| 661 Condition cond = TokenKindToSmiCondition(kind); |
| 662 __ mov(TMP, ZR); |
| 663 branch->EmitBranchOnCondition(compiler, cond); |
| 664 } |
617 } | 665 } |
618 | 666 |
619 | 667 |
620 // First test if receiver is NULL, in which case === is applied. | 668 // First test if receiver is NULL, in which case === is applied. |
621 // If type feedback was provided (lists of <class-id, target>), do a | 669 // If type feedback was provided (lists of <class-id, target>), do a |
622 // type by type check (either === or static call to the operator. | 670 // type by type check (either === or static call to the operator. |
623 static void EmitGenericEqualityCompare(FlowGraphCompiler* compiler, | 671 static void EmitGenericEqualityCompare(FlowGraphCompiler* compiler, |
624 LocationSummary* locs, | 672 LocationSummary* locs, |
625 Token::Kind kind, | 673 Token::Kind kind, |
626 BranchInstr* branch, | 674 BranchInstr* branch, |
627 const ICData& ic_data, | 675 const ICData& ic_data, |
628 intptr_t deopt_id, | 676 intptr_t deopt_id, |
629 intptr_t token_pos) { | 677 intptr_t token_pos) { |
630 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); | 678 ASSERT((kind == Token::kEQ) || (kind == Token::kNE)); |
631 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); | 679 ASSERT(!ic_data.IsNull() && (ic_data.NumberOfChecks() > 0)); |
632 Register left = locs->in(0).reg(); | 680 Register left = locs->in(0).reg(); |
633 Register right = locs->in(1).reg(); | 681 Register right = locs->in(1).reg(); |
634 Label done, identity_compare, non_null_compare; | 682 Label done, identity_compare, non_null_compare; |
635 __ TraceSimMsg("EmitGenericEqualityCompare"); | 683 __ TraceSimMsg("EmitGenericEqualityCompare"); |
636 __ beq(right, NULLREG, &identity_compare); | 684 __ Comment("EmitGenericEqualityCompare"); |
637 __ bne(left, NULLREG, &non_null_compare); | 685 __ LoadImmediate(TMP, reinterpret_cast<int32_t>(Object::null())); |
| 686 __ beq(right, TMP, &identity_compare); |
| 687 __ bne(left, TMP, &non_null_compare); |
638 | 688 |
639 // Comparison with NULL is "===". | 689 // Comparison with NULL is "===". |
640 __ Bind(&identity_compare); | 690 __ Bind(&identity_compare); |
641 Condition cond = TokenKindToSmiCondition(kind); | 691 Condition cond = TokenKindToSmiCondition(kind); |
642 __ slt(CMPRES, left, right); | 692 __ slt(CMPRES, left, right); |
643 __ slt(TMP1, right, left); | 693 __ slt(TMP1, right, left); |
644 if (branch != NULL) { | 694 if (branch != NULL) { |
645 branch->EmitBranchOnCondition(compiler, cond); | 695 branch->EmitBranchOnCondition(compiler, cond); |
646 } else { | 696 } else { |
647 Register result = locs->out().reg(); | 697 Register result = locs->out().reg(); |
(...skipping 30 matching lines...) Expand all Loading... |
678 return EQ; | 728 return EQ; |
679 } | 729 } |
680 } | 730 } |
681 | 731 |
682 | 732 |
683 static void EmitSmiComparisonOp(FlowGraphCompiler* compiler, | 733 static void EmitSmiComparisonOp(FlowGraphCompiler* compiler, |
684 const LocationSummary& locs, | 734 const LocationSummary& locs, |
685 Token::Kind kind, | 735 Token::Kind kind, |
686 BranchInstr* branch) { | 736 BranchInstr* branch) { |
687 __ TraceSimMsg("EmitSmiComparisonOp"); | 737 __ TraceSimMsg("EmitSmiComparisonOp"); |
| 738 __ Comment("EmitSmiComparisonOp"); |
688 Location left = locs.in(0); | 739 Location left = locs.in(0); |
689 Location right = locs.in(1); | 740 Location right = locs.in(1); |
690 ASSERT(!left.IsConstant() || !right.IsConstant()); | 741 ASSERT(!left.IsConstant() || !right.IsConstant()); |
691 | 742 |
692 Condition true_condition = TokenKindToSmiCondition(kind); | 743 Condition true_condition = TokenKindToSmiCondition(kind); |
693 | 744 |
694 if (left.IsConstant()) { | 745 if (left.IsConstant()) { |
695 __ CompareObject(CMPRES, TMP1, right.reg(), left.constant()); | 746 __ CompareObject(CMPRES, TMP1, right.reg(), left.constant()); |
696 true_condition = FlipCondition(true_condition); | 747 true_condition = FlipCondition(true_condition); |
697 } else if (right.IsConstant()) { | 748 } else if (right.IsConstant()) { |
(...skipping 27 matching lines...) Expand all Loading... |
725 | 776 |
726 | 777 |
727 static void EmitUnboxedMintComparisonOp(FlowGraphCompiler* compiler, | 778 static void EmitUnboxedMintComparisonOp(FlowGraphCompiler* compiler, |
728 const LocationSummary& locs, | 779 const LocationSummary& locs, |
729 Token::Kind kind, | 780 Token::Kind kind, |
730 BranchInstr* branch) { | 781 BranchInstr* branch) { |
731 UNIMPLEMENTED(); | 782 UNIMPLEMENTED(); |
732 } | 783 } |
733 | 784 |
734 | 785 |
| 786 static Condition TokenKindToDoubleCondition(Token::Kind kind) { |
| 787 switch (kind) { |
| 788 case Token::kEQ: return EQ; |
| 789 case Token::kNE: return NE; |
| 790 case Token::kLT: return LT; |
| 791 case Token::kGT: return GT; |
| 792 case Token::kLTE: return LE; |
| 793 case Token::kGTE: return GE; |
| 794 default: |
| 795 UNREACHABLE(); |
| 796 return VS; |
| 797 } |
| 798 } |
| 799 |
| 800 |
735 static void EmitDoubleComparisonOp(FlowGraphCompiler* compiler, | 801 static void EmitDoubleComparisonOp(FlowGraphCompiler* compiler, |
736 const LocationSummary& locs, | 802 const LocationSummary& locs, |
737 Token::Kind kind, | 803 Token::Kind kind, |
738 BranchInstr* branch) { | 804 BranchInstr* branch) { |
739 UNIMPLEMENTED(); | 805 DRegister left = locs.in(0).fpu_reg(); |
| 806 DRegister right = locs.in(1).fpu_reg(); |
| 807 |
| 808 __ Comment("DoubleComparisonOp(left=%d, right=%d)", left, right); |
| 809 |
| 810 Condition true_condition = TokenKindToDoubleCondition(kind); |
| 811 if (branch != NULL) { |
| 812 compiler->EmitDoubleCompareBranch( |
| 813 true_condition, left, right, branch); |
| 814 } else { |
| 815 compiler->EmitDoubleCompareBool( |
| 816 true_condition, left, right, locs.out().reg()); |
| 817 } |
740 } | 818 } |
741 | 819 |
742 | 820 |
743 void EqualityCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 821 void EqualityCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
744 ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ)); | 822 ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ)); |
745 BranchInstr* kNoBranch = NULL; | 823 BranchInstr* kNoBranch = NULL; |
| 824 __ Comment("EqualityCompareInstr"); |
746 if (receiver_class_id() == kSmiCid) { | 825 if (receiver_class_id() == kSmiCid) { |
747 EmitSmiComparisonOp(compiler, *locs(), kind(), kNoBranch); | 826 EmitSmiComparisonOp(compiler, *locs(), kind(), kNoBranch); |
748 return; | 827 return; |
749 } | 828 } |
750 if (receiver_class_id() == kMintCid) { | 829 if (receiver_class_id() == kMintCid) { |
751 EmitUnboxedMintEqualityOp(compiler, *locs(), kind(), kNoBranch); | 830 EmitUnboxedMintEqualityOp(compiler, *locs(), kind(), kNoBranch); |
752 return; | 831 return; |
753 } | 832 } |
754 if (receiver_class_id() == kDoubleCid) { | 833 if (receiver_class_id() == kDoubleCid) { |
755 EmitDoubleComparisonOp(compiler, *locs(), kind(), kNoBranch); | 834 EmitDoubleComparisonOp(compiler, *locs(), kind(), kNoBranch); |
(...skipping 22 matching lines...) Expand all Loading... |
778 kind(), | 857 kind(), |
779 locs(), | 858 locs(), |
780 *ic_data()); | 859 *ic_data()); |
781 ASSERT(locs()->out().reg() == V0); | 860 ASSERT(locs()->out().reg() == V0); |
782 } | 861 } |
783 | 862 |
784 | 863 |
785 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, | 864 void EqualityCompareInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
786 BranchInstr* branch) { | 865 BranchInstr* branch) { |
787 __ TraceSimMsg("EqualityCompareInstr"); | 866 __ TraceSimMsg("EqualityCompareInstr"); |
| 867 __ Comment("EqualityCompareInstr:BranchCode"); |
788 ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ)); | 868 ASSERT((kind() == Token::kNE) || (kind() == Token::kEQ)); |
789 if (receiver_class_id() == kSmiCid) { | 869 if (receiver_class_id() == kSmiCid) { |
790 // Deoptimizes if both arguments not Smi. | 870 // Deoptimizes if both arguments not Smi. |
791 EmitSmiComparisonOp(compiler, *locs(), kind(), branch); | 871 EmitSmiComparisonOp(compiler, *locs(), kind(), branch); |
792 return; | 872 return; |
793 } | 873 } |
794 if (receiver_class_id() == kMintCid) { | 874 if (receiver_class_id() == kMintCid) { |
795 EmitUnboxedMintEqualityOp(compiler, *locs(), kind(), branch); | 875 EmitUnboxedMintEqualityOp(compiler, *locs(), kind(), branch); |
796 return; | 876 return; |
797 } | 877 } |
(...skipping 196 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
994 __ AddImmediate(A2, FP, (kParamEndSlotFromFp + | 1074 __ AddImmediate(A2, FP, (kParamEndSlotFromFp + |
995 function().NumParameters()) * kWordSize); | 1075 function().NumParameters()) * kWordSize); |
996 } else { | 1076 } else { |
997 __ AddImmediate(A2, FP, kFirstLocalSlotFromFp * kWordSize); | 1077 __ AddImmediate(A2, FP, kFirstLocalSlotFromFp * kWordSize); |
998 } | 1078 } |
999 // Compute the effective address. When running under the simulator, | 1079 // Compute the effective address. When running under the simulator, |
1000 // this is a redirection address that forces the simulator to call | 1080 // this is a redirection address that forces the simulator to call |
1001 // into the runtime system. | 1081 // into the runtime system. |
1002 uword entry = reinterpret_cast<uword>(native_c_function()); | 1082 uword entry = reinterpret_cast<uword>(native_c_function()); |
1003 #if defined(USING_SIMULATOR) | 1083 #if defined(USING_SIMULATOR) |
1004 entry = Simulator::RedirectExternalReference(entry, Simulator::kNativeCall); | 1084 entry = Simulator::RedirectExternalReference(entry, |
| 1085 Simulator::kNativeCall, |
| 1086 function().NumParameters()); |
1005 #endif | 1087 #endif |
1006 __ LoadImmediate(T5, entry); | 1088 __ LoadImmediate(T5, entry); |
1007 __ LoadImmediate(A1, NativeArguments::ComputeArgcTag(function())); | 1089 __ LoadImmediate(A1, NativeArguments::ComputeArgcTag(function())); |
1008 compiler->GenerateCall(token_pos(), | 1090 compiler->GenerateCall(token_pos(), |
1009 &StubCode::CallNativeCFunctionLabel(), | 1091 &StubCode::CallNativeCFunctionLabel(), |
1010 PcDescriptors::kOther, | 1092 PcDescriptors::kOther, |
1011 locs()); | 1093 locs()); |
1012 __ Pop(result); | 1094 __ Pop(result); |
1013 } | 1095 } |
1014 | 1096 |
(...skipping 183 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1198 UNREACHABLE(); | 1280 UNREACHABLE(); |
1199 } | 1281 } |
1200 __ addu(index.reg(), array, index.reg()); | 1282 __ addu(index.reg(), array, index.reg()); |
1201 element_address = Address(index.reg(), | 1283 element_address = Address(index.reg(), |
1202 FlowGraphCompiler::DataOffsetFor(class_id()) - kHeapObjectTag); | 1284 FlowGraphCompiler::DataOffsetFor(class_id()) - kHeapObjectTag); |
1203 } | 1285 } |
1204 | 1286 |
1205 if ((representation() == kUnboxedDouble) || | 1287 if ((representation() == kUnboxedDouble) || |
1206 (representation() == kUnboxedMint) || | 1288 (representation() == kUnboxedMint) || |
1207 (representation() == kUnboxedFloat32x4)) { | 1289 (representation() == kUnboxedFloat32x4)) { |
1208 UNIMPLEMENTED(); | 1290 DRegister result = locs()->out().fpu_reg(); |
| 1291 switch (class_id()) { |
| 1292 case kTypedDataInt32ArrayCid: |
| 1293 UNIMPLEMENTED(); |
| 1294 break; |
| 1295 case kTypedDataUint32ArrayCid: |
| 1296 UNIMPLEMENTED(); |
| 1297 break; |
| 1298 case kTypedDataFloat32ArrayCid: |
| 1299 // Load single precision float and promote to double. |
| 1300 __ lwc1(STMP1, element_address); |
| 1301 __ cvtds(result, STMP1); |
| 1302 break; |
| 1303 case kTypedDataFloat64ArrayCid: |
| 1304 __ LoadDFromOffset(result, index.reg(), |
| 1305 FlowGraphCompiler::DataOffsetFor(class_id()) - kHeapObjectTag); |
| 1306 break; |
| 1307 case kTypedDataFloat32x4ArrayCid: |
| 1308 UNIMPLEMENTED(); |
| 1309 break; |
| 1310 } |
| 1311 return; |
1209 } | 1312 } |
1210 | 1313 |
1211 Register result = locs()->out().reg(); | 1314 Register result = locs()->out().reg(); |
1212 switch (class_id()) { | 1315 switch (class_id()) { |
1213 case kTypedDataInt8ArrayCid: | 1316 case kTypedDataInt8ArrayCid: |
1214 ASSERT(index_scale() == 1); | 1317 ASSERT(index_scale() == 1); |
1215 __ lb(result, element_address); | 1318 __ lb(result, element_address); |
1216 __ SmiTag(result); | 1319 __ SmiTag(result); |
1217 break; | 1320 break; |
1218 case kTypedDataUint8ArrayCid: | 1321 case kTypedDataUint8ArrayCid: |
(...skipping 356 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1575 | 1678 |
1576 if (field_cid != kSmiCid) { | 1679 if (field_cid != kSmiCid) { |
1577 __ beq(CMPRES, ZR, fail); | 1680 __ beq(CMPRES, ZR, fail); |
1578 __ LoadClassId(value_cid_reg, value_reg); | 1681 __ LoadClassId(value_cid_reg, value_reg); |
1579 __ LoadImmediate(TMP1, field_cid); | 1682 __ LoadImmediate(TMP1, field_cid); |
1580 __ subu(CMPRES, value_cid_reg, TMP1); | 1683 __ subu(CMPRES, value_cid_reg, TMP1); |
1581 } | 1684 } |
1582 | 1685 |
1583 if (field().is_nullable() && (field_cid != kNullCid)) { | 1686 if (field().is_nullable() && (field_cid != kNullCid)) { |
1584 __ beq(CMPRES, ZR, &ok); | 1687 __ beq(CMPRES, ZR, &ok); |
1585 __ subu(CMPRES, value_reg, NULLREG); | 1688 __ LoadImmediate(TMP, reinterpret_cast<int32_t>(Object::null())); |
| 1689 __ subu(CMPRES, value_reg, TMP); |
1586 } | 1690 } |
1587 | 1691 |
1588 if (ok_is_fall_through) { | 1692 if (ok_is_fall_through) { |
1589 __ bne(CMPRES, ZR, fail); | 1693 __ bne(CMPRES, ZR, fail); |
1590 } else { | 1694 } else { |
1591 __ beq(CMPRES, ZR, &ok); | 1695 __ beq(CMPRES, ZR, &ok); |
1592 } | 1696 } |
1593 } else { | 1697 } else { |
1594 // Both value's and field's class id is known. | 1698 // Both value's and field's class id is known. |
1595 if ((value_cid != field_cid) && (value_cid != nullability)) { | 1699 if ((value_cid != field_cid) && (value_cid != nullability)) { |
(...skipping 100 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1696 __ StoreIntoObject(temp, | 1800 __ StoreIntoObject(temp, |
1697 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); | 1801 FieldAddress(temp, Field::value_offset()), value, CanValueBeSmi()); |
1698 } else { | 1802 } else { |
1699 __ StoreIntoObjectNoBarrier( | 1803 __ StoreIntoObjectNoBarrier( |
1700 temp, FieldAddress(temp, Field::value_offset()), value); | 1804 temp, FieldAddress(temp, Field::value_offset()), value); |
1701 } | 1805 } |
1702 } | 1806 } |
1703 | 1807 |
1704 | 1808 |
1705 LocationSummary* InstanceOfInstr::MakeLocationSummary() const { | 1809 LocationSummary* InstanceOfInstr::MakeLocationSummary() const { |
1706 UNIMPLEMENTED(); | 1810 const intptr_t kNumInputs = 3; |
1707 return NULL; | 1811 const intptr_t kNumTemps = 0; |
| 1812 LocationSummary* summary = |
| 1813 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
| 1814 summary->set_in(0, Location::RegisterLocation(A0)); |
| 1815 summary->set_in(1, Location::RegisterLocation(A2)); |
| 1816 summary->set_in(2, Location::RegisterLocation(A1)); |
| 1817 summary->set_out(Location::RegisterLocation(V0)); |
| 1818 return summary; |
1708 } | 1819 } |
1709 | 1820 |
1710 | 1821 |
1711 void InstanceOfInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1822 void InstanceOfInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1712 UNIMPLEMENTED(); | 1823 ASSERT(locs()->in(0).reg() == A0); // Value. |
| 1824 ASSERT(locs()->in(1).reg() == A2); // Instantiator. |
| 1825 ASSERT(locs()->in(2).reg() == A1); // Instantiator type arguments. |
| 1826 |
| 1827 __ Comment("InstanceOfInstr"); |
| 1828 compiler->GenerateInstanceOf(token_pos(), |
| 1829 deopt_id(), |
| 1830 type(), |
| 1831 negate_result(), |
| 1832 locs()); |
| 1833 ASSERT(locs()->out().reg() == V0); |
1713 } | 1834 } |
1714 | 1835 |
1715 | 1836 |
1716 LocationSummary* CreateArrayInstr::MakeLocationSummary() const { | 1837 LocationSummary* CreateArrayInstr::MakeLocationSummary() const { |
1717 const intptr_t kNumInputs = 1; | 1838 const intptr_t kNumInputs = 1; |
1718 const intptr_t kNumTemps = 0; | 1839 const intptr_t kNumTemps = 0; |
1719 LocationSummary* locs = | 1840 LocationSummary* locs = |
1720 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); | 1841 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
1721 locs->set_in(0, Location::RegisterLocation(A0)); | 1842 locs->set_in(0, Location::RegisterLocation(A0)); |
1722 locs->set_out(Location::RegisterLocation(V0)); | 1843 locs->set_out(Location::RegisterLocation(V0)); |
1723 return locs; | 1844 return locs; |
1724 } | 1845 } |
1725 | 1846 |
1726 | 1847 |
1727 void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1848 void CreateArrayInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1728 __ TraceSimMsg("CreateArrayInstr"); | 1849 __ TraceSimMsg("CreateArrayInstr"); |
1729 // Allocate the array. A1 = length, A0 = element type. | 1850 // Allocate the array. A1 = length, A0 = element type. |
1730 ASSERT(locs()->in(0).reg() == A0); | 1851 ASSERT(locs()->in(0).reg() == A0); |
1731 __ LoadImmediate(A1, Smi::RawValue(num_elements())); | 1852 __ LoadImmediate(A1, Smi::RawValue(num_elements())); |
1732 compiler->GenerateCall(token_pos(), | 1853 compiler->GenerateCall(token_pos(), |
1733 &StubCode::AllocateArrayLabel(), | 1854 &StubCode::AllocateArrayLabel(), |
1734 PcDescriptors::kOther, | 1855 PcDescriptors::kOther, |
1735 locs()); | 1856 locs()); |
1736 ASSERT(locs()->out().reg() == V0); | 1857 ASSERT(locs()->out().reg() == V0); |
1737 } | 1858 } |
1738 | 1859 |
1739 | 1860 |
1740 LocationSummary* | 1861 LocationSummary* |
1741 AllocateObjectWithBoundsCheckInstr::MakeLocationSummary() const { | 1862 AllocateObjectWithBoundsCheckInstr::MakeLocationSummary() const { |
1742 UNIMPLEMENTED(); | 1863 return MakeCallSummary(); |
1743 return NULL; | |
1744 } | 1864 } |
1745 | 1865 |
1746 | 1866 |
1747 void AllocateObjectWithBoundsCheckInstr::EmitNativeCode( | 1867 void AllocateObjectWithBoundsCheckInstr::EmitNativeCode( |
1748 FlowGraphCompiler* compiler) { | 1868 FlowGraphCompiler* compiler) { |
1749 UNIMPLEMENTED(); | 1869 compiler->GenerateCallRuntime(token_pos(), |
| 1870 deopt_id(), |
| 1871 kAllocateObjectWithBoundsCheckRuntimeEntry, |
| 1872 locs()); |
| 1873 __ Drop(3); |
| 1874 ASSERT(locs()->out().reg() == V0); |
| 1875 __ Pop(V0); // Pop new instance. |
1750 } | 1876 } |
1751 | 1877 |
1752 | 1878 |
1753 LocationSummary* LoadFieldInstr::MakeLocationSummary() const { | 1879 LocationSummary* LoadFieldInstr::MakeLocationSummary() const { |
1754 return LocationSummary::Make(1, | 1880 return LocationSummary::Make(1, |
1755 Location::RequiresRegister(), | 1881 Location::RequiresRegister(), |
1756 LocationSummary::kNoCall); | 1882 LocationSummary::kNoCall); |
1757 } | 1883 } |
1758 | 1884 |
1759 | 1885 |
(...skipping 26 matching lines...) Expand all Loading... |
1786 // (or null). | 1912 // (or null). |
1787 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 1913 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
1788 !type_arguments().CanShareInstantiatorTypeArguments( | 1914 !type_arguments().CanShareInstantiatorTypeArguments( |
1789 instantiator_class())); | 1915 instantiator_class())); |
1790 // If the instantiator is null and if the type argument vector | 1916 // If the instantiator is null and if the type argument vector |
1791 // instantiated from null becomes a vector of dynamic, then use null as | 1917 // instantiated from null becomes a vector of dynamic, then use null as |
1792 // the type arguments. | 1918 // the type arguments. |
1793 Label type_arguments_instantiated; | 1919 Label type_arguments_instantiated; |
1794 const intptr_t len = type_arguments().Length(); | 1920 const intptr_t len = type_arguments().Length(); |
1795 if (type_arguments().IsRawInstantiatedRaw(len)) { | 1921 if (type_arguments().IsRawInstantiatedRaw(len)) { |
1796 __ beq(instantiator_reg, NULLREG, &type_arguments_instantiated); | 1922 __ BranchEqual(instantiator_reg, reinterpret_cast<int32_t>(Object::null()), |
| 1923 &type_arguments_instantiated); |
1797 } | 1924 } |
1798 // Instantiate non-null type arguments. | 1925 // Instantiate non-null type arguments. |
1799 // A runtime call to instantiate the type arguments is required. | 1926 // A runtime call to instantiate the type arguments is required. |
1800 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 1927 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
1801 __ LoadObject(TMP1, Object::ZoneHandle()); | 1928 __ LoadObject(TMP1, Object::ZoneHandle()); |
1802 __ sw(TMP1, Address(SP, 2 * kWordSize)); // Make room for the result. | 1929 __ sw(TMP1, Address(SP, 2 * kWordSize)); // Make room for the result. |
1803 __ LoadObject(TMP1, type_arguments()); | 1930 __ LoadObject(TMP1, type_arguments()); |
1804 __ sw(TMP1, Address(SP, 1 * kWordSize)); | 1931 __ sw(TMP1, Address(SP, 1 * kWordSize)); |
1805 // Push instantiator type arguments. | 1932 // Push instantiator type arguments. |
1806 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); | 1933 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); |
(...skipping 32 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1839 // instantiator_reg is the instantiator type argument vector, i.e. an | 1966 // instantiator_reg is the instantiator type argument vector, i.e. an |
1840 // AbstractTypeArguments object (or null). | 1967 // AbstractTypeArguments object (or null). |
1841 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 1968 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
1842 !type_arguments().CanShareInstantiatorTypeArguments( | 1969 !type_arguments().CanShareInstantiatorTypeArguments( |
1843 instantiator_class())); | 1970 instantiator_class())); |
1844 // If the instantiator is null and if the type argument vector | 1971 // If the instantiator is null and if the type argument vector |
1845 // instantiated from null becomes a vector of dynamic, then use null as | 1972 // instantiated from null becomes a vector of dynamic, then use null as |
1846 // the type arguments. | 1973 // the type arguments. |
1847 Label type_arguments_instantiated; | 1974 Label type_arguments_instantiated; |
1848 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); | 1975 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); |
1849 __ beq(instantiator_reg, NULLREG, &type_arguments_instantiated); | 1976 __ BranchEqual(instantiator_reg, reinterpret_cast<int32_t>(Object::null()), |
| 1977 &type_arguments_instantiated); |
1850 // Instantiate non-null type arguments. | 1978 // Instantiate non-null type arguments. |
1851 // In the non-factory case, we rely on the allocation stub to | 1979 // In the non-factory case, we rely on the allocation stub to |
1852 // instantiate the type arguments. | 1980 // instantiate the type arguments. |
1853 __ LoadObject(result_reg, type_arguments()); | 1981 __ LoadObject(result_reg, type_arguments()); |
1854 // result_reg: uninstantiated type arguments. | 1982 // result_reg: uninstantiated type arguments. |
1855 __ Bind(&type_arguments_instantiated); | 1983 __ Bind(&type_arguments_instantiated); |
1856 | 1984 |
1857 // result_reg: uninstantiated or instantiated type arguments. | 1985 // result_reg: uninstantiated or instantiated type arguments. |
1858 } | 1986 } |
1859 | 1987 |
(...skipping 19 matching lines...) Expand all Loading... |
1879 // (or null). | 2007 // (or null). |
1880 ASSERT(!type_arguments().IsUninstantiatedIdentity() && | 2008 ASSERT(!type_arguments().IsUninstantiatedIdentity() && |
1881 !type_arguments().CanShareInstantiatorTypeArguments( | 2009 !type_arguments().CanShareInstantiatorTypeArguments( |
1882 instantiator_class())); | 2010 instantiator_class())); |
1883 | 2011 |
1884 // If the instantiator is null and if the type argument vector | 2012 // If the instantiator is null and if the type argument vector |
1885 // instantiated from null becomes a vector of dynamic, then use null as | 2013 // instantiated from null becomes a vector of dynamic, then use null as |
1886 // the type arguments and do not pass the instantiator. | 2014 // the type arguments and do not pass the instantiator. |
1887 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); | 2015 ASSERT(type_arguments().IsRawInstantiatedRaw(type_arguments().Length())); |
1888 Label instantiator_not_null; | 2016 Label instantiator_not_null; |
1889 __ bne(instantiator_reg, NULLREG, &instantiator_not_null); | 2017 __ BranchNotEqual(instantiator_reg, reinterpret_cast<int32_t>(Object::null()), |
| 2018 &instantiator_not_null); |
1890 // Null was used in VisitExtractConstructorTypeArguments as the | 2019 // Null was used in VisitExtractConstructorTypeArguments as the |
1891 // instantiated type arguments, no proper instantiator needed. | 2020 // instantiated type arguments, no proper instantiator needed. |
1892 __ LoadImmediate(instantiator_reg, | 2021 __ LoadImmediate(instantiator_reg, |
1893 Smi::RawValue(StubCode::kNoInstantiator)); | 2022 Smi::RawValue(StubCode::kNoInstantiator)); |
1894 __ Bind(&instantiator_not_null); | 2023 __ Bind(&instantiator_not_null); |
1895 // instantiator_reg: instantiator or kNoInstantiator. | 2024 // instantiator_reg: instantiator or kNoInstantiator. |
1896 } | 2025 } |
1897 | 2026 |
1898 | 2027 |
1899 LocationSummary* AllocateContextInstr::MakeLocationSummary() const { | 2028 LocationSummary* AllocateContextInstr::MakeLocationSummary() const { |
(...skipping 160 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2060 | 2189 |
2061 __ TraceSimMsg("EmitSmiShiftLeft"); | 2190 __ TraceSimMsg("EmitSmiShiftLeft"); |
2062 | 2191 |
2063 if (locs.in(1).IsConstant()) { | 2192 if (locs.in(1).IsConstant()) { |
2064 const Object& constant = locs.in(1).constant(); | 2193 const Object& constant = locs.in(1).constant(); |
2065 ASSERT(constant.IsSmi()); | 2194 ASSERT(constant.IsSmi()); |
2066 // Immediate shift operation takes 5 bits for the count. | 2195 // Immediate shift operation takes 5 bits for the count. |
2067 const intptr_t kCountLimit = 0x1F; | 2196 const intptr_t kCountLimit = 0x1F; |
2068 const intptr_t value = Smi::Cast(constant).Value(); | 2197 const intptr_t value = Smi::Cast(constant).Value(); |
2069 if (value == 0) { | 2198 if (value == 0) { |
2070 // No code needed. | 2199 if (result != left) { |
| 2200 __ mov(result, left); |
| 2201 } |
2071 } else if ((value < 0) || (value >= kCountLimit)) { | 2202 } else if ((value < 0) || (value >= kCountLimit)) { |
2072 // This condition may not be known earlier in some cases because | 2203 // This condition may not be known earlier in some cases because |
2073 // of constant propagation, inlining, etc. | 2204 // of constant propagation, inlining, etc. |
2074 if ((value >= kCountLimit) && is_truncating) { | 2205 if ((value >= kCountLimit) && is_truncating) { |
2075 __ mov(result, ZR); | 2206 __ mov(result, ZR); |
2076 } else { | 2207 } else { |
2077 // Result is Mint or exception. | 2208 // Result is Mint or exception. |
2078 __ b(deopt); | 2209 __ b(deopt); |
2079 } | 2210 } |
2080 } else { | 2211 } else { |
(...skipping 13 matching lines...) Expand all Loading... |
2094 Register right = locs.in(1).reg(); | 2225 Register right = locs.in(1).reg(); |
2095 Range* right_range = shift_left->right()->definition()->range(); | 2226 Range* right_range = shift_left->right()->definition()->range(); |
2096 if (shift_left->left()->BindsToConstant() && !is_truncating) { | 2227 if (shift_left->left()->BindsToConstant() && !is_truncating) { |
2097 // TODO(srdjan): Implement code below for is_truncating(). | 2228 // TODO(srdjan): Implement code below for is_truncating(). |
2098 // If left is constant, we know the maximal allowed size for right. | 2229 // If left is constant, we know the maximal allowed size for right. |
2099 const Object& obj = shift_left->left()->BoundConstant(); | 2230 const Object& obj = shift_left->left()->BoundConstant(); |
2100 if (obj.IsSmi()) { | 2231 if (obj.IsSmi()) { |
2101 const intptr_t left_int = Smi::Cast(obj).Value(); | 2232 const intptr_t left_int = Smi::Cast(obj).Value(); |
2102 if (left_int == 0) { | 2233 if (left_int == 0) { |
2103 __ bltz(right, deopt); | 2234 __ bltz(right, deopt); |
| 2235 __ mov(result, ZR); |
2104 return; | 2236 return; |
2105 } | 2237 } |
2106 const intptr_t max_right = kSmiBits - Utils::HighestBit(left_int); | 2238 const intptr_t max_right = kSmiBits - Utils::HighestBit(left_int); |
2107 const bool right_needs_check = | 2239 const bool right_needs_check = |
2108 (right_range == NULL) || | 2240 (right_range == NULL) || |
2109 !right_range->IsWithin(0, max_right - 1); | 2241 !right_range->IsWithin(0, max_right - 1); |
2110 if (right_needs_check) { | 2242 if (right_needs_check) { |
2111 __ BranchUnsignedGreaterEqual( | 2243 __ BranchUnsignedGreaterEqual( |
2112 right, reinterpret_cast<int32_t>(Smi::New(max_right)), deopt); | 2244 right, reinterpret_cast<int32_t>(Smi::New(max_right)), deopt); |
2113 } | 2245 } |
(...skipping 42 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2156 __ srav(TMP1, TMP1, right); | 2288 __ srav(TMP1, TMP1, right); |
2157 __ bne(TMP1, left, deopt); // Overflow. | 2289 __ bne(TMP1, left, deopt); // Overflow. |
2158 // Shift for result now we know there is no overflow. | 2290 // Shift for result now we know there is no overflow. |
2159 __ sllv(result, left, right); | 2291 __ sllv(result, left, right); |
2160 } | 2292 } |
2161 } | 2293 } |
2162 | 2294 |
2163 | 2295 |
2164 LocationSummary* BinarySmiOpInstr::MakeLocationSummary() const { | 2296 LocationSummary* BinarySmiOpInstr::MakeLocationSummary() const { |
2165 const intptr_t kNumInputs = 2; | 2297 const intptr_t kNumInputs = 2; |
| 2298 const intptr_t kNumTemps = op_kind() == Token::kADD ? 1 : 0; |
| 2299 LocationSummary* summary = |
| 2300 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
2166 if (op_kind() == Token::kTRUNCDIV) { | 2301 if (op_kind() == Token::kTRUNCDIV) { |
2167 UNIMPLEMENTED(); | 2302 if (RightIsPowerOfTwoConstant()) { |
2168 return NULL; | 2303 summary->set_in(0, Location::RequiresRegister()); |
2169 } else { | 2304 ConstantInstr* right_constant = right()->definition()->AsConstant(); |
2170 const intptr_t kNumTemps = op_kind() == Token::kADD ? 1 : 0; | 2305 summary->set_in(1, Location::Constant(right_constant->value())); |
2171 LocationSummary* summary = | 2306 summary->set_out(Location::RequiresRegister()); |
2172 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 2307 } else { |
2173 summary->set_in(0, Location::RequiresRegister()); | 2308 // Both inputs must be writable because they will be untagged. |
2174 summary->set_in(1, Location::RegisterOrSmiConstant(right())); | 2309 summary->set_in(0, Location::WritableRegister()); |
2175 if (op_kind() == Token::kADD) { | 2310 summary->set_in(1, Location::WritableRegister()); |
2176 // Need an extra temp for the overflow detection code. | 2311 summary->set_out(Location::RequiresRegister()); |
2177 summary->set_temp(0, Location::RequiresRegister()); | |
2178 } | 2312 } |
2179 // We make use of 3-operand instructions by not requiring result register | |
2180 // to be identical to first input register as on Intel. | |
2181 summary->set_out(Location::RequiresRegister()); | |
2182 return summary; | 2313 return summary; |
2183 } | 2314 } |
| 2315 summary->set_in(0, Location::RequiresRegister()); |
| 2316 summary->set_in(1, Location::RegisterOrSmiConstant(right())); |
| 2317 if (op_kind() == Token::kADD) { |
| 2318 // Need an extra temp for the overflow detection code. |
| 2319 summary->set_temp(0, Location::RequiresRegister()); |
| 2320 } |
| 2321 // We make use of 3-operand instructions by not requiring result register |
| 2322 // to be identical to first input register as on Intel. |
| 2323 summary->set_out(Location::RequiresRegister()); |
| 2324 return summary; |
2184 } | 2325 } |
2185 | 2326 |
2186 | 2327 |
2187 void BinarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2328 void BinarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2188 __ TraceSimMsg("BinarySmiOpInstr"); | 2329 __ TraceSimMsg("BinarySmiOpInstr"); |
2189 if (op_kind() == Token::kSHL) { | 2330 if (op_kind() == Token::kSHL) { |
2190 EmitSmiShiftLeft(compiler, this); | 2331 EmitSmiShiftLeft(compiler, this); |
2191 return; | 2332 return; |
2192 } | 2333 } |
2193 | 2334 |
(...skipping 50 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2244 __ mult(left, TMP1); | 2385 __ mult(left, TMP1); |
2245 __ mflo(result); | 2386 __ mflo(result); |
2246 __ mfhi(TMP1); | 2387 __ mfhi(TMP1); |
2247 } | 2388 } |
2248 __ sra(CMPRES, result, 31); | 2389 __ sra(CMPRES, result, 31); |
2249 __ bne(TMP1, CMPRES, deopt); | 2390 __ bne(TMP1, CMPRES, deopt); |
2250 } | 2391 } |
2251 break; | 2392 break; |
2252 } | 2393 } |
2253 case Token::kTRUNCDIV: { | 2394 case Token::kTRUNCDIV: { |
2254 UNIMPLEMENTED(); | 2395 const intptr_t value = Smi::Cast(constant).Value(); |
| 2396 if (value == 1) { |
| 2397 if (result != left) { |
| 2398 __ mov(result, left); |
| 2399 } |
| 2400 break; |
| 2401 } else if (value == -1) { |
| 2402 // Check the corner case of dividing the 'MIN_SMI' with -1, in which |
| 2403 // case we cannot negate the result. |
| 2404 __ BranchEqual(left, 0x80000000, deopt); |
| 2405 __ subu(result, ZR, left); |
| 2406 break; |
| 2407 } |
| 2408 ASSERT((value != 0) && Utils::IsPowerOfTwo(Utils::Abs(value))); |
| 2409 const intptr_t shift_count = |
| 2410 Utils::ShiftForPowerOfTwo(Utils::Abs(value)) + kSmiTagSize; |
| 2411 ASSERT(kSmiTagSize == 1); |
| 2412 __ sra(TMP, left, 31); |
| 2413 ASSERT(shift_count > 1); // 1, -1 case handled above. |
| 2414 __ sll(TMP, TMP, 32 - shift_count); |
| 2415 __ addu(left, left, TMP); |
| 2416 ASSERT(shift_count > 0); |
| 2417 __ sra(result, left, shift_count); |
| 2418 if (value < 0) { |
| 2419 __ subu(result, ZR, result); |
| 2420 } |
| 2421 __ SmiTag(result); |
2255 break; | 2422 break; |
2256 } | 2423 } |
2257 case Token::kBIT_AND: { | 2424 case Token::kBIT_AND: { |
2258 // No overflow check. | 2425 // No overflow check. |
2259 if (Utils::IsUint(kImmBits, imm)) { | 2426 if (Utils::IsUint(kImmBits, imm)) { |
2260 __ andi(result, left, Immediate(imm)); | 2427 __ andi(result, left, Immediate(imm)); |
2261 } else { | 2428 } else { |
2262 __ LoadImmediate(TMP1, imm); | 2429 __ LoadImmediate(TMP1, imm); |
2263 __ and_(result, left, TMP1); | 2430 __ and_(result, left, TMP1); |
2264 } | 2431 } |
(...skipping 21 matching lines...) Expand all Loading... |
2286 } | 2453 } |
2287 case Token::kSHR: { | 2454 case Token::kSHR: { |
2288 // sarl operation masks the count to 5 bits. | 2455 // sarl operation masks the count to 5 bits. |
2289 const intptr_t kCountLimit = 0x1F; | 2456 const intptr_t kCountLimit = 0x1F; |
2290 intptr_t value = Smi::Cast(constant).Value(); | 2457 intptr_t value = Smi::Cast(constant).Value(); |
2291 | 2458 |
2292 __ TraceSimMsg("kSHR"); | 2459 __ TraceSimMsg("kSHR"); |
2293 | 2460 |
2294 if (value == 0) { | 2461 if (value == 0) { |
2295 // TODO(vegorov): should be handled outside. | 2462 // TODO(vegorov): should be handled outside. |
2296 __ break_(0); | 2463 if (result != left) { |
| 2464 __ mov(result, left); |
| 2465 } |
2297 break; | 2466 break; |
2298 } else if (value < 0) { | 2467 } else if (value < 0) { |
2299 // TODO(vegorov): should be handled outside. | 2468 // TODO(vegorov): should be handled outside. |
2300 __ b(deopt); | 2469 __ b(deopt); |
2301 break; | 2470 break; |
2302 } | 2471 } |
2303 | 2472 |
2304 value = value + kSmiTagSize; | 2473 value = value + kSmiTagSize; |
2305 if (value >= kCountLimit) value = kCountLimit; | 2474 if (value >= kCountLimit) value = kCountLimit; |
2306 | 2475 |
(...skipping 52 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2359 // No overflow check. | 2528 // No overflow check. |
2360 __ or_(result, left, right); | 2529 __ or_(result, left, right); |
2361 break; | 2530 break; |
2362 } | 2531 } |
2363 case Token::kBIT_XOR: { | 2532 case Token::kBIT_XOR: { |
2364 // No overflow check. | 2533 // No overflow check. |
2365 __ xor_(result, left, right); | 2534 __ xor_(result, left, right); |
2366 break; | 2535 break; |
2367 } | 2536 } |
2368 case Token::kTRUNCDIV: { | 2537 case Token::kTRUNCDIV: { |
2369 UNIMPLEMENTED(); | 2538 // Handle divide by zero in runtime. |
| 2539 __ beq(right, ZR, deopt); |
| 2540 __ SmiUntag(left); |
| 2541 __ SmiUntag(right); |
| 2542 __ div(left, right); |
| 2543 __ mflo(result); |
| 2544 // Check the corner case of dividing the 'MIN_SMI' with -1, in which |
| 2545 // case we cannot tag the result. |
| 2546 __ BranchEqual(V0, 0x40000000, deopt); |
| 2547 __ SmiTag(result); |
2370 break; | 2548 break; |
2371 } | 2549 } |
2372 case Token::kSHR: { | 2550 case Token::kSHR: { |
2373 UNIMPLEMENTED(); | 2551 UNIMPLEMENTED(); |
2374 break; | 2552 break; |
2375 } | 2553 } |
2376 case Token::kDIV: { | 2554 case Token::kDIV: { |
2377 // Dispatches to 'Double./'. | 2555 // Dispatches to 'Double./'. |
2378 // TODO(srdjan): Implement as conversion to double and double division. | 2556 // TODO(srdjan): Implement as conversion to double and double division. |
2379 UNREACHABLE(); | 2557 UNREACHABLE(); |
(...skipping 12 matching lines...) Expand all Loading... |
2392 break; | 2570 break; |
2393 } | 2571 } |
2394 default: | 2572 default: |
2395 UNREACHABLE(); | 2573 UNREACHABLE(); |
2396 break; | 2574 break; |
2397 } | 2575 } |
2398 } | 2576 } |
2399 | 2577 |
2400 | 2578 |
2401 LocationSummary* CheckEitherNonSmiInstr::MakeLocationSummary() const { | 2579 LocationSummary* CheckEitherNonSmiInstr::MakeLocationSummary() const { |
2402 UNIMPLEMENTED(); | 2580 intptr_t left_cid = left()->Type()->ToCid(); |
2403 return NULL; | 2581 intptr_t right_cid = right()->Type()->ToCid(); |
| 2582 ASSERT((left_cid != kDoubleCid) && (right_cid != kDoubleCid)); |
| 2583 const intptr_t kNumInputs = 2; |
| 2584 const intptr_t kNumTemps = 0; |
| 2585 LocationSummary* summary = |
| 2586 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 2587 summary->set_in(0, Location::RequiresRegister()); |
| 2588 summary->set_in(1, Location::RequiresRegister()); |
| 2589 return summary; |
2404 } | 2590 } |
2405 | 2591 |
2406 | 2592 |
2407 void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2593 void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2408 UNIMPLEMENTED(); | 2594 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptBinaryDoubleOp); |
| 2595 intptr_t left_cid = left()->Type()->ToCid(); |
| 2596 intptr_t right_cid = right()->Type()->ToCid(); |
| 2597 Register left = locs()->in(0).reg(); |
| 2598 Register right = locs()->in(1).reg(); |
| 2599 if (left_cid == kSmiCid) { |
| 2600 __ andi(CMPRES, right, Immediate(kSmiTagMask)); |
| 2601 } else if (right_cid == kSmiCid) { |
| 2602 __ andi(CMPRES, left, Immediate(kSmiTagMask)); |
| 2603 } else { |
| 2604 __ or_(TMP, left, right); |
| 2605 __ andi(CMPRES, TMP, Immediate(kSmiTagMask)); |
| 2606 } |
| 2607 __ beq(CMPRES, ZR, deopt); |
2409 } | 2608 } |
2410 | 2609 |
2411 | 2610 |
2412 LocationSummary* BoxDoubleInstr::MakeLocationSummary() const { | 2611 LocationSummary* BoxDoubleInstr::MakeLocationSummary() const { |
2413 UNIMPLEMENTED(); | 2612 const intptr_t kNumInputs = 1; |
2414 return NULL; | 2613 const intptr_t kNumTemps = 0; |
| 2614 LocationSummary* summary = |
| 2615 new LocationSummary(kNumInputs, |
| 2616 kNumTemps, |
| 2617 LocationSummary::kCallOnSlowPath); |
| 2618 summary->set_in(0, Location::RequiresFpuRegister()); |
| 2619 summary->set_out(Location::RequiresRegister()); |
| 2620 return summary; |
2415 } | 2621 } |
2416 | 2622 |
2417 | 2623 |
| 2624 class BoxDoubleSlowPath : public SlowPathCode { |
| 2625 public: |
| 2626 explicit BoxDoubleSlowPath(BoxDoubleInstr* instruction) |
| 2627 : instruction_(instruction) { } |
| 2628 |
| 2629 virtual void EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2630 __ Comment("BoxDoubleSlowPath"); |
| 2631 __ Bind(entry_label()); |
| 2632 const Class& double_class = compiler->double_class(); |
| 2633 const Code& stub = |
| 2634 Code::Handle(StubCode::GetAllocationStubForClass(double_class)); |
| 2635 const ExternalLabel label(double_class.ToCString(), stub.EntryPoint()); |
| 2636 |
| 2637 LocationSummary* locs = instruction_->locs(); |
| 2638 locs->live_registers()->Remove(locs->out()); |
| 2639 |
| 2640 compiler->SaveLiveRegisters(locs); |
| 2641 compiler->GenerateCall(Scanner::kDummyTokenIndex, // No token position. |
| 2642 &label, |
| 2643 PcDescriptors::kOther, |
| 2644 locs); |
| 2645 if (locs->out().reg() != V0) { |
| 2646 __ mov(locs->out().reg(), V0); |
| 2647 } |
| 2648 compiler->RestoreLiveRegisters(locs); |
| 2649 |
| 2650 __ b(exit_label()); |
| 2651 } |
| 2652 |
| 2653 private: |
| 2654 BoxDoubleInstr* instruction_; |
| 2655 }; |
| 2656 |
| 2657 |
2418 void BoxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2658 void BoxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2419 UNIMPLEMENTED(); | 2659 BoxDoubleSlowPath* slow_path = new BoxDoubleSlowPath(this); |
| 2660 compiler->AddSlowPathCode(slow_path); |
| 2661 |
| 2662 Register out_reg = locs()->out().reg(); |
| 2663 DRegister value = locs()->in(0).fpu_reg(); |
| 2664 |
| 2665 __ TryAllocate(compiler->double_class(), |
| 2666 slow_path->entry_label(), |
| 2667 out_reg); |
| 2668 __ Bind(slow_path->exit_label()); |
| 2669 __ StoreDToOffset(value, out_reg, Double::value_offset() - kHeapObjectTag); |
2420 } | 2670 } |
2421 | 2671 |
2422 | 2672 |
2423 LocationSummary* UnboxDoubleInstr::MakeLocationSummary() const { | 2673 LocationSummary* UnboxDoubleInstr::MakeLocationSummary() const { |
2424 UNIMPLEMENTED(); | 2674 const intptr_t kNumInputs = 1; |
2425 return NULL; | 2675 const intptr_t value_cid = value()->Type()->ToCid(); |
| 2676 const bool needs_writable_input = (value_cid == kSmiCid); |
| 2677 const intptr_t kNumTemps = 0; |
| 2678 LocationSummary* summary = |
| 2679 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 2680 summary->set_in(0, needs_writable_input |
| 2681 ? Location::WritableRegister() |
| 2682 : Location::RequiresRegister()); |
| 2683 summary->set_out(Location::RequiresFpuRegister()); |
| 2684 return summary; |
2426 } | 2685 } |
2427 | 2686 |
2428 | 2687 |
2429 void UnboxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2688 void UnboxDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2430 UNIMPLEMENTED(); | 2689 const intptr_t value_cid = value()->Type()->ToCid(); |
| 2690 const Register value = locs()->in(0).reg(); |
| 2691 const DRegister result = locs()->out().fpu_reg(); |
| 2692 |
| 2693 if (value_cid == kDoubleCid) { |
| 2694 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); |
| 2695 } else if (value_cid == kSmiCid) { |
| 2696 __ SmiUntag(value); // Untag input before conversion. |
| 2697 __ mtc1(value, STMP1); |
| 2698 __ cvtdw(result, STMP1); |
| 2699 } else { |
| 2700 Label* deopt = compiler->AddDeoptStub(deopt_id_, kDeoptBinaryDoubleOp); |
| 2701 Label is_smi, done; |
| 2702 |
| 2703 __ andi(CMPRES, value, Immediate(kSmiTagMask)); |
| 2704 __ beq(CMPRES, ZR, &is_smi); |
| 2705 __ LoadClassId(TMP, value); |
| 2706 __ BranchNotEqual(TMP, kDoubleCid, deopt); |
| 2707 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); |
| 2708 __ b(&done); |
| 2709 __ Bind(&is_smi); |
| 2710 // TODO(regis): Why do we preserve value here but not above? |
| 2711 __ sra(TMP, value, 1); |
| 2712 __ mtc1(TMP, STMP1); |
| 2713 __ cvtdw(result, STMP1); |
| 2714 __ Bind(&done); |
| 2715 } |
2431 } | 2716 } |
2432 | 2717 |
2433 | 2718 |
2434 LocationSummary* BoxFloat32x4Instr::MakeLocationSummary() const { | 2719 LocationSummary* BoxFloat32x4Instr::MakeLocationSummary() const { |
2435 UNIMPLEMENTED(); | 2720 UNIMPLEMENTED(); |
2436 return NULL; | 2721 return NULL; |
2437 } | 2722 } |
2438 | 2723 |
2439 | 2724 |
2440 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2725 void BoxFloat32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
(...skipping 28 matching lines...) Expand all Loading... |
2469 return NULL; | 2754 return NULL; |
2470 } | 2755 } |
2471 | 2756 |
2472 | 2757 |
2473 void UnboxUint32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2758 void UnboxUint32x4Instr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2474 UNIMPLEMENTED(); | 2759 UNIMPLEMENTED(); |
2475 } | 2760 } |
2476 | 2761 |
2477 | 2762 |
2478 LocationSummary* BinaryDoubleOpInstr::MakeLocationSummary() const { | 2763 LocationSummary* BinaryDoubleOpInstr::MakeLocationSummary() const { |
2479 UNIMPLEMENTED(); | 2764 const intptr_t kNumInputs = 2; |
2480 return NULL; | 2765 const intptr_t kNumTemps = 0; |
| 2766 LocationSummary* summary = |
| 2767 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 2768 summary->set_in(0, Location::RequiresFpuRegister()); |
| 2769 summary->set_in(1, Location::RequiresFpuRegister()); |
| 2770 summary->set_out(Location::RequiresFpuRegister()); |
| 2771 return summary; |
2481 } | 2772 } |
2482 | 2773 |
2483 | 2774 |
2484 void BinaryDoubleOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2775 void BinaryDoubleOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2485 UNIMPLEMENTED(); | 2776 DRegister left = locs()->in(0).fpu_reg(); |
| 2777 DRegister right = locs()->in(1).fpu_reg(); |
| 2778 DRegister result = locs()->out().fpu_reg(); |
| 2779 switch (op_kind()) { |
| 2780 case Token::kADD: __ addd(result, left, right); break; |
| 2781 case Token::kSUB: __ subd(result, left, right); break; |
| 2782 case Token::kMUL: __ muld(result, left, right); break; |
| 2783 case Token::kDIV: __ divd(result, left, right); break; |
| 2784 default: UNREACHABLE(); |
| 2785 } |
2486 } | 2786 } |
2487 | 2787 |
2488 | 2788 |
2489 LocationSummary* BinaryFloat32x4OpInstr::MakeLocationSummary() const { | 2789 LocationSummary* BinaryFloat32x4OpInstr::MakeLocationSummary() const { |
2490 UNIMPLEMENTED(); | 2790 UNIMPLEMENTED(); |
2491 return NULL; | 2791 return NULL; |
2492 } | 2792 } |
2493 | 2793 |
2494 | 2794 |
2495 void BinaryFloat32x4OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2795 void BinaryFloat32x4OpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
(...skipping 204 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2700 return NULL; | 3000 return NULL; |
2701 } | 3001 } |
2702 | 3002 |
2703 | 3003 |
2704 void MathSqrtInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3004 void MathSqrtInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2705 UNIMPLEMENTED(); | 3005 UNIMPLEMENTED(); |
2706 } | 3006 } |
2707 | 3007 |
2708 | 3008 |
2709 LocationSummary* UnarySmiOpInstr::MakeLocationSummary() const { | 3009 LocationSummary* UnarySmiOpInstr::MakeLocationSummary() const { |
2710 UNIMPLEMENTED(); | 3010 const intptr_t kNumInputs = 1; |
2711 return NULL; | 3011 const intptr_t kNumTemps = 0; |
| 3012 LocationSummary* summary = |
| 3013 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 3014 summary->set_in(0, Location::RequiresRegister()); |
| 3015 // We make use of 3-operand instructions by not requiring result register |
| 3016 // to be identical to first input register as on Intel. |
| 3017 summary->set_out(Location::RequiresRegister()); |
| 3018 return summary; |
2712 } | 3019 } |
2713 | 3020 |
2714 | 3021 |
2715 void UnarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3022 void UnarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2716 UNIMPLEMENTED(); | 3023 Register value = locs()->in(0).reg(); |
| 3024 Register result = locs()->out().reg(); |
| 3025 switch (op_kind()) { |
| 3026 case Token::kNEGATE: { |
| 3027 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
| 3028 kDeoptUnaryOp); |
| 3029 __ SubuDetectOverflow(result, ZR, value, CMPRES); |
| 3030 __ bltz(CMPRES, deopt); |
| 3031 break; |
| 3032 } |
| 3033 case Token::kBIT_NOT: |
| 3034 __ nor(result, value, ZR); |
| 3035 __ addiu(result, result, Immediate(-1)); // Remove inverted smi-tag. |
| 3036 break; |
| 3037 default: |
| 3038 UNREACHABLE(); |
| 3039 } |
2717 } | 3040 } |
2718 | 3041 |
2719 | 3042 |
2720 LocationSummary* SmiToDoubleInstr::MakeLocationSummary() const { | 3043 LocationSummary* SmiToDoubleInstr::MakeLocationSummary() const { |
2721 UNIMPLEMENTED(); | 3044 const intptr_t kNumInputs = 1; |
2722 return NULL; | 3045 const intptr_t kNumTemps = 0; |
| 3046 LocationSummary* result = |
| 3047 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 3048 result->set_in(0, Location::WritableRegister()); |
| 3049 result->set_out(Location::RequiresFpuRegister()); |
| 3050 return result; |
2723 } | 3051 } |
2724 | 3052 |
2725 | 3053 |
2726 void SmiToDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3054 void SmiToDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2727 UNIMPLEMENTED(); | 3055 Register value = locs()->in(0).reg(); |
| 3056 FpuRegister result = locs()->out().fpu_reg(); |
| 3057 __ SmiUntag(value); |
| 3058 __ mtc1(value, STMP1); |
| 3059 __ cvtdw(result, STMP1); |
2728 } | 3060 } |
2729 | 3061 |
2730 | 3062 |
2731 LocationSummary* DoubleToIntegerInstr::MakeLocationSummary() const { | 3063 LocationSummary* DoubleToIntegerInstr::MakeLocationSummary() const { |
2732 UNIMPLEMENTED(); | 3064 UNIMPLEMENTED(); |
2733 return NULL; | 3065 return NULL; |
2734 } | 3066 } |
2735 | 3067 |
2736 | 3068 |
2737 void DoubleToIntegerInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3069 void DoubleToIntegerInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
(...skipping 17 matching lines...) Expand all Loading... |
2755 return NULL; | 3087 return NULL; |
2756 } | 3088 } |
2757 | 3089 |
2758 | 3090 |
2759 void DoubleToDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3091 void DoubleToDoubleInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2760 UNIMPLEMENTED(); | 3092 UNIMPLEMENTED(); |
2761 } | 3093 } |
2762 | 3094 |
2763 | 3095 |
2764 LocationSummary* InvokeMathCFunctionInstr::MakeLocationSummary() const { | 3096 LocationSummary* InvokeMathCFunctionInstr::MakeLocationSummary() const { |
2765 UNIMPLEMENTED(); | 3097 ASSERT((InputCount() == 1) || (InputCount() == 2)); |
2766 return NULL; | 3098 const intptr_t kNumTemps = 0; |
| 3099 LocationSummary* result = |
| 3100 new LocationSummary(InputCount(), kNumTemps, LocationSummary::kCall); |
| 3101 result->set_in(0, Location::FpuRegisterLocation(D6)); |
| 3102 if (InputCount() == 2) { |
| 3103 result->set_in(1, Location::FpuRegisterLocation(D7)); |
| 3104 } |
| 3105 result->set_out(Location::FpuRegisterLocation(D0)); |
| 3106 return result; |
2767 } | 3107 } |
2768 | 3108 |
2769 | 3109 |
2770 void InvokeMathCFunctionInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3110 void InvokeMathCFunctionInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2771 UNIMPLEMENTED(); | 3111 // For pow-function return NAN if exponent is NAN. |
| 3112 Label do_call, skip_call; |
| 3113 if (recognized_kind() == MethodRecognizer::kDoublePow) { |
| 3114 DRegister exp = locs()->in(1).fpu_reg(); |
| 3115 __ cund(exp, exp); |
| 3116 __ bc1f(&do_call); |
| 3117 // Exponent is NaN, return NaN. |
| 3118 __ movd(locs()->out().fpu_reg(), exp); |
| 3119 __ b(&skip_call); |
| 3120 } |
| 3121 __ Bind(&do_call); |
| 3122 // double values are passed and returned in vfp registers. |
| 3123 __ CallRuntime(TargetFunction()); |
| 3124 __ Bind(&skip_call); |
2772 } | 3125 } |
2773 | 3126 |
2774 | 3127 |
2775 LocationSummary* PolymorphicInstanceCallInstr::MakeLocationSummary() const { | 3128 LocationSummary* PolymorphicInstanceCallInstr::MakeLocationSummary() const { |
2776 return MakeCallSummary(); | 3129 return MakeCallSummary(); |
2777 } | 3130 } |
2778 | 3131 |
2779 | 3132 |
2780 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3133 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2781 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 3134 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
2837 summary->AddTemp(Location::RequiresRegister()); | 3190 summary->AddTemp(Location::RequiresRegister()); |
2838 } | 3191 } |
2839 return summary; | 3192 return summary; |
2840 } | 3193 } |
2841 | 3194 |
2842 | 3195 |
2843 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3196 void CheckClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2844 if (null_check()) { | 3197 if (null_check()) { |
2845 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 3198 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
2846 kDeoptCheckClass); | 3199 kDeoptCheckClass); |
2847 __ beq(locs()->in(0).reg(), NULLREG, deopt); | 3200 __ BranchEqual(locs()->in(0).reg(), |
| 3201 reinterpret_cast<int32_t>(Object::null()), deopt); |
2848 return; | 3202 return; |
2849 } | 3203 } |
2850 | 3204 |
2851 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || | 3205 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || |
2852 (unary_checks().NumberOfChecks() > 1)); | 3206 (unary_checks().NumberOfChecks() > 1)); |
2853 Register value = locs()->in(0).reg(); | 3207 Register value = locs()->in(0).reg(); |
2854 Register temp = locs()->temp(0).reg(); | 3208 Register temp = locs()->temp(0).reg(); |
2855 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 3209 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
2856 kDeoptCheckClass); | 3210 kDeoptCheckClass); |
2857 Label is_ok; | 3211 Label is_ok; |
(...skipping 266 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3124 locs->set_in(0, Location::RegisterOrConstant(left())); | 3478 locs->set_in(0, Location::RegisterOrConstant(left())); |
3125 locs->set_in(1, Location::RegisterOrConstant(right())); | 3479 locs->set_in(1, Location::RegisterOrConstant(right())); |
3126 locs->set_out(Location::RequiresRegister()); | 3480 locs->set_out(Location::RequiresRegister()); |
3127 return locs; | 3481 return locs; |
3128 } | 3482 } |
3129 | 3483 |
3130 | 3484 |
3131 // Special code for numbers (compare values instead of references.) | 3485 // Special code for numbers (compare values instead of references.) |
3132 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3486 void StrictCompareInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
3133 __ TraceSimMsg("StrictCompareInstr"); | 3487 __ TraceSimMsg("StrictCompareInstr"); |
| 3488 __ Comment("StrictCompareInstr"); |
3134 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); | 3489 ASSERT(kind() == Token::kEQ_STRICT || kind() == Token::kNE_STRICT); |
3135 Location left = locs()->in(0); | 3490 Location left = locs()->in(0); |
3136 Location right = locs()->in(1); | 3491 Location right = locs()->in(1); |
3137 if (left.IsConstant() && right.IsConstant()) { | 3492 if (left.IsConstant() && right.IsConstant()) { |
3138 // TODO(vegorov): should be eliminated earlier by constant propagation. | 3493 // TODO(vegorov): should be eliminated earlier by constant propagation. |
3139 const bool result = (kind() == Token::kEQ_STRICT) ? | 3494 const bool result = (kind() == Token::kEQ_STRICT) ? |
3140 left.constant().raw() == right.constant().raw() : | 3495 left.constant().raw() == right.constant().raw() : |
3141 left.constant().raw() != right.constant().raw(); | 3496 left.constant().raw() != right.constant().raw(); |
3142 __ LoadObject(locs()->out().reg(), result ? Bool::True() : Bool::False()); | 3497 __ LoadObject(locs()->out().reg(), result ? Bool::True() : Bool::False()); |
3143 return; | 3498 return; |
(...skipping 130 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3274 } | 3629 } |
3275 | 3630 |
3276 | 3631 |
3277 LocationSummary* AllocateObjectInstr::MakeLocationSummary() const { | 3632 LocationSummary* AllocateObjectInstr::MakeLocationSummary() const { |
3278 return MakeCallSummary(); | 3633 return MakeCallSummary(); |
3279 } | 3634 } |
3280 | 3635 |
3281 | 3636 |
// Allocates an instance by calling the class-specific allocation stub.
// Arguments pushed by the caller are dropped after the call returns.
void AllocateObjectInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  __ TraceSimMsg("AllocateObjectInstr");
  __ Comment("AllocateObjectInstr");
  // The stub is looked up via the class that owns the constructor being
  // invoked; each class has its own allocation stub.
  const Class& cls = Class::ZoneHandle(constructor().Owner());
  const Code& stub = Code::Handle(StubCode::GetAllocationStubForClass(cls));
  const ExternalLabel label(cls.ToCString(), stub.EntryPoint());
  compiler->GenerateCall(token_pos(),
                         &label,
                         PcDescriptors::kOther,
                         locs());
  __ Drop(ArgumentCount());  // Discard arguments.
}
3293 | 3649 |
3294 | 3650 |
// Closure creation is performed entirely by a stub call, so the standard
// call location summary (no fixed input registers) is sufficient.
LocationSummary* CreateClosureInstr::MakeLocationSummary() const {
  return MakeCallSummary();
}
3298 | 3654 |
3299 | 3655 |
3300 void CreateClosureInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3656 void CreateClosureInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3657 __ Comment("CreateClosureInstr"); |
3301 const Function& closure_function = function(); | 3658 const Function& closure_function = function(); |
3302 ASSERT(!closure_function.IsImplicitStaticClosureFunction()); | 3659 ASSERT(!closure_function.IsImplicitStaticClosureFunction()); |
3303 const Code& stub = Code::Handle( | 3660 const Code& stub = Code::Handle( |
3304 StubCode::GetAllocationStubForClosure(closure_function)); | 3661 StubCode::GetAllocationStubForClosure(closure_function)); |
3305 const ExternalLabel label(closure_function.ToCString(), stub.EntryPoint()); | 3662 const ExternalLabel label(closure_function.ToCString(), stub.EntryPoint()); |
3306 compiler->GenerateCall(token_pos(), | 3663 compiler->GenerateCall(token_pos(), |
3307 &label, | 3664 &label, |
3308 PcDescriptors::kOther, | 3665 PcDescriptors::kOther, |
3309 locs()); | 3666 locs()); |
3310 __ Drop(2); // Discard type arguments and receiver. | 3667 __ Drop(2); // Discard type arguments and receiver. |
3311 } | 3668 } |
3312 | 3669 |
3313 } // namespace dart | 3670 } // namespace dart |
3314 | 3671 |
3315 #endif // defined TARGET_ARCH_MIPS | 3672 #endif // defined TARGET_ARCH_MIPS |
OLD | NEW |