| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
| 6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
| 7 | 7 |
| 8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
| 9 | 9 |
| 10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
| (...skipping 455 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 466 case Token::kGT: return GT; | 466 case Token::kGT: return GT; |
| 467 case Token::kLTE: return LE; | 467 case Token::kLTE: return LE; |
| 468 case Token::kGTE: return GE; | 468 case Token::kGTE: return GE; |
| 469 default: | 469 default: |
| 470 UNREACHABLE(); | 470 UNREACHABLE(); |
| 471 return VS; | 471 return VS; |
| 472 } | 472 } |
| 473 } | 473 } |
| 474 | 474 |
| 475 | 475 |
| 476 // Branches on condition c assuming comparison results in CMPRES and CMPRES2. | 476 // Branches on condition c assuming comparison results in CMPRES1 and CMPRES2. |
| 477 static void EmitBranchAfterCompare( | 477 static void EmitBranchAfterCompare( |
| 478 FlowGraphCompiler* compiler, Condition c, Label* is_true) { | 478 FlowGraphCompiler* compiler, Condition c, Label* is_true) { |
| 479 switch (c) { | 479 switch (c) { |
| 480 case EQ: __ beq(CMPRES1, CMPRES2, is_true); break; | 480 case EQ: __ beq(CMPRES1, CMPRES2, is_true); break; |
| 481 case NE: __ bne(CMPRES1, CMPRES2, is_true); break; | 481 case NE: __ bne(CMPRES1, CMPRES2, is_true); break; |
| 482 case GT: __ bne(CMPRES2, ZR, is_true); break; | 482 case GT: __ bne(CMPRES2, ZR, is_true); break; |
| 483 case GE: __ beq(CMPRES1, ZR, is_true); break; | 483 case GE: __ beq(CMPRES1, ZR, is_true); break; |
| 484 case LT: __ bne(CMPRES1, ZR, is_true); break; | 484 case LT: __ bne(CMPRES1, ZR, is_true); break; |
| 485 case LE: __ beq(CMPRES2, ZR, is_true); break; | 485 case LE: __ beq(CMPRES2, ZR, is_true); break; |
| 486 default: | 486 default: |
| (...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 628 return; | 628 return; |
| 629 } | 629 } |
| 630 if (operation_cid() == kDoubleCid) { | 630 if (operation_cid() == kDoubleCid) { |
| 631 EmitDoubleComparisonOp(compiler, *locs(), kind(), branch); | 631 EmitDoubleComparisonOp(compiler, *locs(), kind(), branch); |
| 632 return; | 632 return; |
| 633 } | 633 } |
| 634 UNREACHABLE(); | 634 UNREACHABLE(); |
| 635 } | 635 } |
| 636 | 636 |
| 637 | 637 |
| 638 LocationSummary* TestSmiInstr::MakeLocationSummary() const { |
| 639 const intptr_t kNumInputs = 2; |
| 640 const intptr_t kNumTemps = 0; |
| 641 LocationSummary* locs = |
| 642 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 643 locs->set_in(0, Location::RequiresRegister()); |
| 644 // Only one input can be a constant operand. The case of two constant |
| 645 // operands should be handled by constant propagation. |
| 646 locs->set_in(1, Location::RegisterOrConstant(right())); |
| 647 return locs; |
| 648 } |
| 649 |
| 650 |
| 651 void TestSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 652 // Never emitted outside of the BranchInstr. |
| 653 UNREACHABLE(); |
| 654 } |
| 655 |
| 656 |
| 657 void TestSmiInstr::EmitBranchCode(FlowGraphCompiler* compiler, |
| 658 BranchInstr* branch) { |
| 659 Condition branch_condition = (kind() == Token::kNE) ? NE : EQ; |
| 660 Register left = locs()->in(0).reg(); |
| 661 Location right = locs()->in(1); |
| 662 if (right.IsConstant()) { |
| 663 ASSERT(right.constant().IsSmi()); |
| 664 const int32_t imm = |
| 665 reinterpret_cast<int32_t>(right.constant().raw()); |
| 666 __ AndImmediate(CMPRES1, left, imm); |
| 667 } else { |
| 668 __ and_(CMPRES1, left, right.reg()); |
| 669 } |
| 670 __ mov(CMPRES2, ZR); |
| 671 branch->EmitBranchOnCondition(compiler, branch_condition); |
| 672 } |
| 673 |
| 674 |
| 638 LocationSummary* RelationalOpInstr::MakeLocationSummary() const { | 675 LocationSummary* RelationalOpInstr::MakeLocationSummary() const { |
| 639 const intptr_t kNumInputs = 2; | 676 const intptr_t kNumInputs = 2; |
| 640 const intptr_t kNumTemps = 0; | 677 const intptr_t kNumTemps = 0; |
| 641 if (operation_cid() == kMintCid) { | 678 if (operation_cid() == kMintCid) { |
| 642 const intptr_t kNumTemps = 2; | 679 const intptr_t kNumTemps = 2; |
| 643 LocationSummary* locs = | 680 LocationSummary* locs = |
| 644 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 681 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 645 locs->set_in(0, Location::RequiresFpuRegister()); | 682 locs->set_in(0, Location::RequiresFpuRegister()); |
| 646 locs->set_in(1, Location::RequiresFpuRegister()); | 683 locs->set_in(1, Location::RequiresFpuRegister()); |
| 647 locs->set_temp(0, Location::RequiresRegister()); | 684 locs->set_temp(0, Location::RequiresRegister()); |
| (...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 770 | 807 |
| 771 void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 808 void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 772 Register char_code = locs()->in(0).reg(); | 809 Register char_code = locs()->in(0).reg(); |
| 773 Register result = locs()->out().reg(); | 810 Register result = locs()->out().reg(); |
| 774 | 811 |
| 775 __ TraceSimMsg("StringFromCharCodeInstr"); | 812 __ TraceSimMsg("StringFromCharCodeInstr"); |
| 776 | 813 |
| 777 __ LoadImmediate(result, | 814 __ LoadImmediate(result, |
| 778 reinterpret_cast<uword>(Symbols::PredefinedAddress())); | 815 reinterpret_cast<uword>(Symbols::PredefinedAddress())); |
| 779 __ AddImmediate(result, Symbols::kNullCharCodeSymbolOffset * kWordSize); | 816 __ AddImmediate(result, Symbols::kNullCharCodeSymbolOffset * kWordSize); |
| 780 __ sll(TMP1, char_code, 1); // Char code is a smi. | 817 __ sll(TMP, char_code, 1); // Char code is a smi. |
| 781 __ addu(TMP1, TMP1, result); | 818 __ addu(TMP, TMP, result); |
| 782 __ lw(result, Address(TMP1)); | 819 __ lw(result, Address(TMP)); |
| 783 } | 820 } |
| 784 | 821 |
| 785 | 822 |
| 786 LocationSummary* StringInterpolateInstr::MakeLocationSummary() const { | 823 LocationSummary* StringInterpolateInstr::MakeLocationSummary() const { |
| 787 const intptr_t kNumInputs = 1; | 824 const intptr_t kNumInputs = 1; |
| 788 const intptr_t kNumTemps = 0; | 825 const intptr_t kNumTemps = 0; |
| 789 LocationSummary* summary = | 826 LocationSummary* summary = |
| 790 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); | 827 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
| 791 summary->set_in(0, Location::RegisterLocation(A0)); | 828 summary->set_in(0, Location::RegisterLocation(A0)); |
| 792 summary->set_out(Location::RegisterLocation(V0)); | 829 summary->set_out(Location::RegisterLocation(V0)); |
| (...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 829 return LocationSummary::Make(kNumInputs, | 866 return LocationSummary::Make(kNumInputs, |
| 830 Location::RequiresRegister(), | 867 Location::RequiresRegister(), |
| 831 LocationSummary::kNoCall); | 868 LocationSummary::kNoCall); |
| 832 } | 869 } |
| 833 | 870 |
| 834 | 871 |
| 835 void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 872 void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 836 Register object = locs()->in(0).reg(); | 873 Register object = locs()->in(0).reg(); |
| 837 Register result = locs()->out().reg(); | 874 Register result = locs()->out().reg(); |
| 838 Label load, done; | 875 Label load, done; |
| 839 __ andi(CMPRES, object, Immediate(kSmiTagMask)); | 876 __ andi(CMPRES1, object, Immediate(kSmiTagMask)); |
| 840 __ bne(CMPRES, ZR, &load); | 877 __ bne(CMPRES1, ZR, &load); |
| 841 __ LoadImmediate(result, Smi::RawValue(kSmiCid)); | 878 __ LoadImmediate(result, Smi::RawValue(kSmiCid)); |
| 842 __ b(&done); | 879 __ b(&done); |
| 843 __ Bind(&load); | 880 __ Bind(&load); |
| 844 __ LoadClassId(result, object); | 881 __ LoadClassId(result, object); |
| 845 __ SmiTag(result); | 882 __ SmiTag(result); |
| 846 __ Bind(&done); | 883 __ Bind(&done); |
| 847 } | 884 } |
| 848 | 885 |
| 849 | 886 |
| 850 CompileType LoadIndexedInstr::ComputeType() const { | 887 CompileType LoadIndexedInstr::ComputeType() const { |
| (...skipping 197 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1048 __ lw(result, element_address); | 1085 __ lw(result, element_address); |
| 1049 // Verify that the signed value in 'result' can fit inside a Smi. | 1086 // Verify that the signed value in 'result' can fit inside a Smi. |
| 1050 __ BranchSignedLess(result, 0xC0000000, deopt); | 1087 __ BranchSignedLess(result, 0xC0000000, deopt); |
| 1051 __ SmiTag(result); | 1088 __ SmiTag(result); |
| 1052 } | 1089 } |
| 1053 break; | 1090 break; |
| 1054 case kTypedDataUint32ArrayCid: { | 1091 case kTypedDataUint32ArrayCid: { |
| 1055 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptUint32Load); | 1092 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptUint32Load); |
| 1056 __ lw(result, element_address); | 1093 __ lw(result, element_address); |
| 1057 // Verify that the unsigned value in 'result' can fit inside a Smi. | 1094 // Verify that the unsigned value in 'result' can fit inside a Smi. |
| 1058 __ LoadImmediate(TMP1, 0xC0000000); | 1095 __ LoadImmediate(TMP, 0xC0000000); |
| 1059 __ and_(CMPRES, result, TMP1); | 1096 __ and_(CMPRES1, result, TMP); |
| 1060 __ bne(CMPRES, ZR, deopt); | 1097 __ bne(CMPRES1, ZR, deopt); |
| 1061 __ SmiTag(result); | 1098 __ SmiTag(result); |
| 1062 } | 1099 } |
| 1063 break; | 1100 break; |
| 1064 default: | 1101 default: |
| 1065 ASSERT((class_id() == kArrayCid) || (class_id() == kImmutableArrayCid)); | 1102 ASSERT((class_id() == kArrayCid) || (class_id() == kImmutableArrayCid)); |
| 1066 __ lw(result, element_address); | 1103 __ lw(result, element_address); |
| 1067 break; | 1104 break; |
| 1068 } | 1105 } |
| 1069 } | 1106 } |
| 1070 | 1107 |
| (...skipping 162 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1233 value = 0; | 1270 value = 0; |
| 1234 } | 1271 } |
| 1235 __ LoadImmediate(TMP, static_cast<int8_t>(value)); | 1272 __ LoadImmediate(TMP, static_cast<int8_t>(value)); |
| 1236 __ sb(TMP, element_address); | 1273 __ sb(TMP, element_address); |
| 1237 } else { | 1274 } else { |
| 1238 Register value = locs()->in(2).reg(); | 1275 Register value = locs()->in(2).reg(); |
| 1239 Label store_value, bigger, smaller; | 1276 Label store_value, bigger, smaller; |
| 1240 __ SmiUntag(value); | 1277 __ SmiUntag(value); |
| 1241 __ BranchUnsignedLess(value, 0xFF + 1, &store_value); | 1278 __ BranchUnsignedLess(value, 0xFF + 1, &store_value); |
| 1242 __ LoadImmediate(TMP, 0xFF); | 1279 __ LoadImmediate(TMP, 0xFF); |
| 1243 __ slti(CMPRES, value, Immediate(1)); | 1280 __ slti(CMPRES1, value, Immediate(1)); |
| 1244 __ movn(TMP, ZR, CMPRES); | 1281 __ movn(TMP, ZR, CMPRES1); |
| 1245 __ mov(value, TMP); | 1282 __ mov(value, TMP); |
| 1246 __ Bind(&store_value); | 1283 __ Bind(&store_value); |
| 1247 __ sb(value, element_address); | 1284 __ sb(value, element_address); |
| 1248 } | 1285 } |
| 1249 break; | 1286 break; |
| 1250 } | 1287 } |
| 1251 case kTypedDataInt16ArrayCid: | 1288 case kTypedDataInt16ArrayCid: |
| 1252 case kTypedDataUint16ArrayCid: { | 1289 case kTypedDataUint16ArrayCid: { |
| 1253 Register value = locs()->in(2).reg(); | 1290 Register value = locs()->in(2).reg(); |
| 1254 __ SmiUntag(value); | 1291 __ SmiUntag(value); |
| (...skipping 167 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1422 __ lw(CMPRES1, field_length_operand); | 1459 __ lw(CMPRES1, field_length_operand); |
| 1423 __ subu(CMPRES1, TMP, CMPRES1); | 1460 __ subu(CMPRES1, TMP, CMPRES1); |
| 1424 __ b(&length_compared); | 1461 __ b(&length_compared); |
| 1425 __ Bind(&no_fixed_length); | 1462 __ Bind(&no_fixed_length); |
| 1426 __ b(fail); | 1463 __ b(fail); |
| 1427 __ Bind(&length_compared); | 1464 __ Bind(&length_compared); |
| 1428 } | 1465 } |
| 1429 __ bne(CMPRES1, ZR, fail); | 1466 __ bne(CMPRES1, ZR, fail); |
| 1430 } | 1467 } |
| 1431 __ Bind(&skip_length_check); | 1468 __ Bind(&skip_length_check); |
| 1432 __ lw(TMP1, field_nullability_operand); | 1469 __ lw(TMP, field_nullability_operand); |
| 1433 __ subu(CMPRES, value_cid_reg, TMP1); | 1470 __ subu(CMPRES1, value_cid_reg, TMP); |
| 1434 } else if (value_cid == kNullCid) { | 1471 } else if (value_cid == kNullCid) { |
| 1435 // TODO(regis): TMP1 may conflict. Revisit. | 1472 __ lw(TMP, field_nullability_operand); |
| 1436 __ lw(TMP1, field_nullability_operand); | 1473 __ LoadImmediate(CMPRES1, value_cid); |
| 1437 __ LoadImmediate(CMPRES, value_cid); | 1474 __ subu(CMPRES1, TMP, CMPRES1); |
| 1438 __ subu(CMPRES, TMP1, CMPRES); | |
| 1439 } else { | 1475 } else { |
| 1440 Label skip_length_check; | 1476 Label skip_length_check; |
| 1441 // TODO(regis): TMP1 may conflict. Revisit. | 1477 __ lw(TMP, field_cid_operand); |
| 1442 __ lw(TMP1, field_cid_operand); | 1478 __ LoadImmediate(CMPRES1, value_cid); |
| 1443 __ LoadImmediate(CMPRES, value_cid); | 1479 __ subu(CMPRES1, TMP, CMPRES1); |
| 1444 __ subu(CMPRES, TMP1, CMPRES); | 1480 __ bne(CMPRES1, ZR, &skip_length_check); |
| 1445 __ bne(CMPRES, ZR, &skip_length_check); | |
| 1446 // Insert length check. | 1481 // Insert length check. |
| 1447 if (field_has_length) { | 1482 if (field_has_length) { |
| 1448 ASSERT(value_cid_reg != kNoRegister); | 1483 ASSERT(value_cid_reg != kNoRegister); |
| 1449 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { | 1484 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { |
| 1450 __ lw(TMP, FieldAddress(value_reg, Array::length_offset())); | 1485 __ lw(TMP, FieldAddress(value_reg, Array::length_offset())); |
| 1451 __ LoadImmediate(CMPRES, Smi::RawValue(field_length)); | 1486 __ LoadImmediate(CMPRES1, Smi::RawValue(field_length)); |
| 1452 __ subu(CMPRES, TMP, CMPRES); | 1487 __ subu(CMPRES1, TMP, CMPRES1); |
| 1453 } else if (RawObject::IsTypedDataClassId(value_cid)) { | 1488 } else if (RawObject::IsTypedDataClassId(value_cid)) { |
| 1454 __ lw(TMP, FieldAddress(value_reg, TypedData::length_offset())); | 1489 __ lw(TMP, FieldAddress(value_reg, TypedData::length_offset())); |
| 1455 __ LoadImmediate(CMPRES, Smi::RawValue(field_length)); | 1490 __ LoadImmediate(CMPRES1, Smi::RawValue(field_length)); |
| 1456 __ subu(CMPRES, TMP, CMPRES); | 1491 __ subu(CMPRES1, TMP, CMPRES1); |
| 1457 } else if (field_cid != kIllegalCid) { | 1492 } else if (field_cid != kIllegalCid) { |
| 1458 ASSERT(field_cid != value_cid); | 1493 ASSERT(field_cid != value_cid); |
| 1459 ASSERT(field_length >= 0); | 1494 ASSERT(field_length >= 0); |
| 1460 // Field has a known class id and length. At compile time it is | 1495 // Field has a known class id and length. At compile time it is |
| 1461 // known that the value's class id is not a fixed length list. | 1496 // known that the value's class id is not a fixed length list. |
| 1462 __ b(fail); | 1497 __ b(fail); |
| 1463 } else { | 1498 } else { |
| 1464 ASSERT(field_cid == kIllegalCid); | 1499 ASSERT(field_cid == kIllegalCid); |
| 1465 ASSERT(field_length == Field::kUnknownFixedLength); | 1500 ASSERT(field_length == Field::kUnknownFixedLength); |
| 1466 // Following jump cannot occur, fall through. | 1501 // Following jump cannot occur, fall through. |
| 1467 } | 1502 } |
| 1468 __ bne(CMPRES, ZR, fail); | 1503 __ bne(CMPRES1, ZR, fail); |
| 1469 } | 1504 } |
| 1470 __ Bind(&skip_length_check); | 1505 __ Bind(&skip_length_check); |
| 1471 } | 1506 } |
| 1472 __ beq(CMPRES, ZR, &ok); | 1507 __ beq(CMPRES1, ZR, &ok); |
| 1473 | 1508 |
| 1474 __ lw(CMPRES1, field_cid_operand); | 1509 __ lw(CMPRES1, field_cid_operand); |
| 1475 __ BranchNotEqual(CMPRES1, kIllegalCid, fail); | 1510 __ BranchNotEqual(CMPRES1, kIllegalCid, fail); |
| 1476 | 1511 |
| 1477 if (value_cid == kDynamicCid) { | 1512 if (value_cid == kDynamicCid) { |
| 1478 __ sw(value_cid_reg, field_cid_operand); | 1513 __ sw(value_cid_reg, field_cid_operand); |
| 1479 __ sw(value_cid_reg, field_nullability_operand); | 1514 __ sw(value_cid_reg, field_nullability_operand); |
| 1480 if (field_has_length) { | 1515 if (field_has_length) { |
| 1481 Label check_array, length_set, no_fixed_length; | 1516 Label check_array, length_set, no_fixed_length; |
| 1482 __ BranchEqual(value_cid_reg, kNullCid, &no_fixed_length); | 1517 __ BranchEqual(value_cid_reg, kNullCid, &no_fixed_length); |
| (...skipping 13 matching lines...) Expand all Loading... |
| 1496 __ BranchSignedGreater(value_cid_reg, kImmutableArrayCid, | 1531 __ BranchSignedGreater(value_cid_reg, kImmutableArrayCid, |
| 1497 &no_fixed_length); | 1532 &no_fixed_length); |
| 1498 __ BranchSignedLess(value_cid_reg, kArrayCid, &no_fixed_length); | 1533 __ BranchSignedLess(value_cid_reg, kArrayCid, &no_fixed_length); |
| 1499 // Destroy value_cid_reg (safe because we are finished with it). | 1534 // Destroy value_cid_reg (safe because we are finished with it). |
| 1500 __ lw(value_cid_reg, | 1535 __ lw(value_cid_reg, |
| 1501 FieldAddress(value_reg, Array::length_offset())); | 1536 FieldAddress(value_reg, Array::length_offset())); |
| 1502 __ sw(value_cid_reg, field_length_operand); | 1537 __ sw(value_cid_reg, field_length_operand); |
| 1503 // Updated field length from regular array. | 1538 // Updated field length from regular array. |
| 1504 __ b(&length_set); | 1539 __ b(&length_set); |
| 1505 __ Bind(&no_fixed_length); | 1540 __ Bind(&no_fixed_length); |
| 1506 // TODO(regis): TMP1 may conflict. Revisit. | 1541 __ LoadImmediate(TMP, Smi::RawValue(Field::kNoFixedLength)); |
| 1507 __ LoadImmediate(TMP1, Smi::RawValue(Field::kNoFixedLength)); | 1542 __ sw(TMP, field_length_operand); |
| 1508 __ sw(TMP1, field_length_operand); | |
| 1509 __ Bind(&length_set); | 1543 __ Bind(&length_set); |
| 1510 } | 1544 } |
| 1511 } else { | 1545 } else { |
| 1512 ASSERT(field_reg != kNoRegister); | 1546 ASSERT(field_reg != kNoRegister); |
| 1513 __ LoadImmediate(TMP1, value_cid); | 1547 __ LoadImmediate(TMP, value_cid); |
| 1514 __ sw(TMP1, field_cid_operand); | 1548 __ sw(TMP, field_cid_operand); |
| 1515 __ sw(TMP1, field_nullability_operand); | 1549 __ sw(TMP, field_nullability_operand); |
| 1516 if (field_has_length) { | 1550 if (field_has_length) { |
| 1517 ASSERT(value_cid_reg != kNoRegister); | 1551 ASSERT(value_cid_reg != kNoRegister); |
| 1518 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { | 1552 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { |
| 1519 // Destroy value_cid_reg (safe because we are finished with it). | 1553 // Destroy value_cid_reg (safe because we are finished with it). |
| 1520 __ lw(value_cid_reg, | 1554 __ lw(value_cid_reg, |
| 1521 FieldAddress(value_reg, Array::length_offset())); | 1555 FieldAddress(value_reg, Array::length_offset())); |
| 1522 __ sw(value_cid_reg, field_length_operand); | 1556 __ sw(value_cid_reg, field_length_operand); |
| 1523 } else if (RawObject::IsTypedDataClassId(value_cid)) { | 1557 } else if (RawObject::IsTypedDataClassId(value_cid)) { |
| 1524 // Destroy value_cid_reg (safe because we are finished with it). | 1558 // Destroy value_cid_reg (safe because we are finished with it). |
| 1525 __ lw(value_cid_reg, | 1559 __ lw(value_cid_reg, |
| 1526 FieldAddress(value_reg, TypedData::length_offset())); | 1560 FieldAddress(value_reg, TypedData::length_offset())); |
| 1527 __ sw(value_cid_reg, field_length_operand); | 1561 __ sw(value_cid_reg, field_length_operand); |
| 1528 } else { | 1562 } else { |
| 1529 // Destroy value_cid_reg (safe because we are finished with it). | 1563 // Destroy value_cid_reg (safe because we are finished with it). |
| 1530 __ LoadImmediate(value_cid_reg, Smi::RawValue(Field::kNoFixedLength)); | 1564 __ LoadImmediate(value_cid_reg, Smi::RawValue(Field::kNoFixedLength)); |
| 1531 __ sw(value_cid_reg, field_length_operand); | 1565 __ sw(value_cid_reg, field_length_operand); |
| 1532 } | 1566 } |
| 1533 } | 1567 } |
| 1534 } | 1568 } |
| 1535 if (!ok_is_fall_through) { | 1569 if (!ok_is_fall_through) { |
| 1536 __ b(&ok); | 1570 __ b(&ok); |
| 1537 } | 1571 } |
| 1538 } else { | 1572 } else { |
| 1539 if (field_reg != kNoRegister) { | 1573 if (field_reg != kNoRegister) { |
| 1540 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); | 1574 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); |
| 1541 } | 1575 } |
| 1542 if (value_cid == kDynamicCid) { | 1576 if (value_cid == kDynamicCid) { |
| 1543 // Field's guarded class id is fixed but value's class id is not known. | 1577 // Field's guarded class id is fixed but value's class id is not known. |
| 1544 __ andi(CMPRES, value_reg, Immediate(kSmiTagMask)); | 1578 __ andi(CMPRES1, value_reg, Immediate(kSmiTagMask)); |
| 1545 | 1579 |
| 1546 if (field_cid != kSmiCid) { | 1580 if (field_cid != kSmiCid) { |
| 1547 __ beq(CMPRES, ZR, fail); | 1581 __ beq(CMPRES1, ZR, fail); |
| 1548 __ LoadClassId(value_cid_reg, value_reg); | 1582 __ LoadClassId(value_cid_reg, value_reg); |
| 1549 __ LoadImmediate(TMP1, field_cid); | 1583 __ LoadImmediate(TMP, field_cid); |
| 1550 __ subu(CMPRES, value_cid_reg, TMP1); | 1584 __ subu(CMPRES1, value_cid_reg, TMP); |
| 1551 } | 1585 } |
| 1552 | 1586 |
| 1553 if (field_has_length) { | 1587 if (field_has_length) { |
| 1554 // Jump when Value CID != Field guard CID | 1588 // Jump when Value CID != Field guard CID |
| 1555 __ bne(CMPRES, ZR, fail); | 1589 __ bne(CMPRES1, ZR, fail); |
| 1556 // Classes are same, perform guarded list length check. | 1590 // Classes are same, perform guarded list length check. |
| 1557 ASSERT(field_reg != kNoRegister); | 1591 ASSERT(field_reg != kNoRegister); |
| 1558 ASSERT(value_cid_reg != kNoRegister); | 1592 ASSERT(value_cid_reg != kNoRegister); |
| 1559 FieldAddress field_length_operand( | 1593 FieldAddress field_length_operand( |
| 1560 field_reg, Field::guarded_list_length_offset()); | 1594 field_reg, Field::guarded_list_length_offset()); |
| 1561 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { | 1595 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { |
| 1562 // Destroy value_cid_reg (safe because we are finished with it). | 1596 // Destroy value_cid_reg (safe because we are finished with it). |
| 1563 __ lw(value_cid_reg, | 1597 __ lw(value_cid_reg, |
| 1564 FieldAddress(value_reg, Array::length_offset())); | 1598 FieldAddress(value_reg, Array::length_offset())); |
| 1565 } else if (RawObject::IsTypedDataClassId(field_cid)) { | 1599 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
| 1566 // Destroy value_cid_reg (safe because we are finished with it). | 1600 // Destroy value_cid_reg (safe because we are finished with it). |
| 1567 __ lw(value_cid_reg, | 1601 __ lw(value_cid_reg, |
| 1568 FieldAddress(value_reg, TypedData::length_offset())); | 1602 FieldAddress(value_reg, TypedData::length_offset())); |
| 1569 } | 1603 } |
| 1570 __ lw(TMP1, field_length_operand); | 1604 __ lw(TMP, field_length_operand); |
| 1571 __ subu(CMPRES, value_cid_reg, TMP1); | 1605 __ subu(CMPRES1, value_cid_reg, TMP); |
| 1572 } | 1606 } |
| 1573 | 1607 |
| 1574 if (field().is_nullable() && (field_cid != kNullCid)) { | 1608 if (field().is_nullable() && (field_cid != kNullCid)) { |
| 1575 __ beq(CMPRES, ZR, &ok); | 1609 __ beq(CMPRES1, ZR, &ok); |
| 1576 __ LoadImmediate(TMP, reinterpret_cast<int32_t>(Object::null())); | 1610 __ LoadImmediate(TMP, reinterpret_cast<int32_t>(Object::null())); |
| 1577 __ subu(CMPRES, value_reg, TMP); | 1611 __ subu(CMPRES1, value_reg, TMP); |
| 1578 } | 1612 } |
| 1579 | 1613 |
| 1580 if (ok_is_fall_through) { | 1614 if (ok_is_fall_through) { |
| 1581 __ bne(CMPRES, ZR, fail); | 1615 __ bne(CMPRES1, ZR, fail); |
| 1582 } else { | 1616 } else { |
| 1583 __ beq(CMPRES, ZR, &ok); | 1617 __ beq(CMPRES1, ZR, &ok); |
| 1584 } | 1618 } |
| 1585 } else { | 1619 } else { |
| 1586 // Both value's and field's class id is known. | 1620 // Both value's and field's class id is known. |
| 1587 if ((value_cid != field_cid) && (value_cid != nullability)) { | 1621 if ((value_cid != field_cid) && (value_cid != nullability)) { |
| 1588 if (ok_is_fall_through) { | 1622 if (ok_is_fall_through) { |
| 1589 __ b(fail); | 1623 __ b(fail); |
| 1590 } | 1624 } |
| 1591 } else if (field_has_length && (value_cid == field_cid)) { | 1625 } else if (field_has_length && (value_cid == field_cid)) { |
| 1592 ASSERT(value_cid_reg != kNoRegister); | 1626 ASSERT(value_cid_reg != kNoRegister); |
| 1593 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { | 1627 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { |
| 1594 // Destroy value_cid_reg (safe because we are finished with it). | 1628 // Destroy value_cid_reg (safe because we are finished with it). |
| 1595 __ lw(value_cid_reg, | 1629 __ lw(value_cid_reg, |
| 1596 FieldAddress(value_reg, Array::length_offset())); | 1630 FieldAddress(value_reg, Array::length_offset())); |
| 1597 } else if (RawObject::IsTypedDataClassId(field_cid)) { | 1631 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
| 1598 // Destroy value_cid_reg (safe because we are finished with it). | 1632 // Destroy value_cid_reg (safe because we are finished with it). |
| 1599 __ lw(value_cid_reg, | 1633 __ lw(value_cid_reg, |
| 1600 FieldAddress(value_reg, TypedData::length_offset())); | 1634 FieldAddress(value_reg, TypedData::length_offset())); |
| 1601 } | 1635 } |
| 1602 __ LoadImmediate(TMP1, Smi::RawValue(field_length)); | 1636 __ LoadImmediate(TMP, Smi::RawValue(field_length)); |
| 1603 __ subu(CMPRES, value_cid_reg, TMP1); | 1637 __ subu(CMPRES1, value_cid_reg, TMP); |
| 1604 if (ok_is_fall_through) { | 1638 if (ok_is_fall_through) { |
| 1605 __ bne(CMPRES, ZR, fail); | 1639 __ bne(CMPRES1, ZR, fail); |
| 1606 } | 1640 } |
| 1607 } else { | 1641 } else { |
| 1608 // Nothing to emit. | 1642 // Nothing to emit. |
| 1609 ASSERT(!compiler->is_optimizing()); | 1643 ASSERT(!compiler->is_optimizing()); |
| 1610 return; | 1644 return; |
| 1611 } | 1645 } |
| 1612 } | 1646 } |
| 1613 } | 1647 } |
| 1614 | 1648 |
| 1615 if (deopt == NULL) { | 1649 if (deopt == NULL) { |
| (...skipping 198 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 1814 | 1848 |
| 1815 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1849 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 1816 __ TraceSimMsg("InstantiateTypeInstr"); | 1850 __ TraceSimMsg("InstantiateTypeInstr"); |
| 1817 Register instantiator_reg = locs()->in(0).reg(); | 1851 Register instantiator_reg = locs()->in(0).reg(); |
| 1818 Register result_reg = locs()->out().reg(); | 1852 Register result_reg = locs()->out().reg(); |
| 1819 | 1853 |
| 1820 // 'instantiator_reg' is the instantiator AbstractTypeArguments object | 1854 // 'instantiator_reg' is the instantiator AbstractTypeArguments object |
| 1821 // (or null). | 1855 // (or null). |
| 1822 // A runtime call to instantiate the type is required. | 1856 // A runtime call to instantiate the type is required. |
| 1823 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 1857 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
| 1824 __ LoadObject(TMP1, Object::ZoneHandle()); | 1858 __ LoadObject(TMP, Object::ZoneHandle()); |
| 1825 __ sw(TMP1, Address(SP, 2 * kWordSize)); // Make room for the result. | 1859 __ sw(TMP, Address(SP, 2 * kWordSize)); // Make room for the result. |
| 1826 __ LoadObject(TMP1, type()); | 1860 __ LoadObject(TMP, type()); |
| 1827 __ sw(TMP1, Address(SP, 1 * kWordSize)); | 1861 __ sw(TMP, Address(SP, 1 * kWordSize)); |
| 1828 // Push instantiator type arguments. | 1862 // Push instantiator type arguments. |
| 1829 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); | 1863 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); |
| 1830 | 1864 |
| 1831 compiler->GenerateRuntimeCall(token_pos(), | 1865 compiler->GenerateRuntimeCall(token_pos(), |
| 1832 deopt_id(), | 1866 deopt_id(), |
| 1833 kInstantiateTypeRuntimeEntry, | 1867 kInstantiateTypeRuntimeEntry, |
| 1834 2, | 1868 2, |
| 1835 locs()); | 1869 locs()); |
| 1836 // Pop instantiated type. | 1870 // Pop instantiated type. |
| 1837 __ lw(result_reg, Address(SP, 2 * kWordSize)); | 1871 __ lw(result_reg, Address(SP, 2 * kWordSize)); |
| (...skipping 30 matching lines...) Expand all Loading... |
| 1868 // the type arguments. | 1902 // the type arguments. |
| 1869 Label type_arguments_instantiated; | 1903 Label type_arguments_instantiated; |
| 1870 const intptr_t len = type_arguments().Length(); | 1904 const intptr_t len = type_arguments().Length(); |
| 1871 if (type_arguments().IsRawInstantiatedRaw(len)) { | 1905 if (type_arguments().IsRawInstantiatedRaw(len)) { |
| 1872 __ BranchEqual(instantiator_reg, reinterpret_cast<int32_t>(Object::null()), | 1906 __ BranchEqual(instantiator_reg, reinterpret_cast<int32_t>(Object::null()), |
| 1873 &type_arguments_instantiated); | 1907 &type_arguments_instantiated); |
| 1874 } | 1908 } |
| 1875 // Instantiate non-null type arguments. | 1909 // Instantiate non-null type arguments. |
| 1876 // A runtime call to instantiate the type arguments is required. | 1910 // A runtime call to instantiate the type arguments is required. |
| 1877 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 1911 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
| 1878 __ LoadObject(TMP1, Object::ZoneHandle()); | 1912 __ LoadObject(TMP, Object::ZoneHandle()); |
| 1879 __ sw(TMP1, Address(SP, 2 * kWordSize)); // Make room for the result. | 1913 __ sw(TMP, Address(SP, 2 * kWordSize)); // Make room for the result. |
| 1880 __ LoadObject(TMP1, type_arguments()); | 1914 __ LoadObject(TMP, type_arguments()); |
| 1881 __ sw(TMP1, Address(SP, 1 * kWordSize)); | 1915 __ sw(TMP, Address(SP, 1 * kWordSize)); |
| 1882 // Push instantiator type arguments. | 1916 // Push instantiator type arguments. |
| 1883 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); | 1917 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); |
| 1884 | 1918 |
| 1885 compiler->GenerateRuntimeCall(token_pos(), | 1919 compiler->GenerateRuntimeCall(token_pos(), |
| 1886 deopt_id(), | 1920 deopt_id(), |
| 1887 kInstantiateTypeArgumentsRuntimeEntry, | 1921 kInstantiateTypeArgumentsRuntimeEntry, |
| 1888 2, | 1922 2, |
| 1889 locs()); | 1923 locs()); |
| 1890 // Pop instantiated type arguments. | 1924 // Pop instantiated type arguments. |
| 1891 __ lw(result_reg, Address(SP, 2 * kWordSize)); | 1925 __ lw(result_reg, Address(SP, 2 * kWordSize)); |
| (...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2014 } | 2048 } |
| 2015 | 2049 |
| 2016 | 2050 |
| 2017 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2051 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2018 Register context_value = locs()->in(0).reg(); | 2052 Register context_value = locs()->in(0).reg(); |
| 2019 Register result = locs()->out().reg(); | 2053 Register result = locs()->out().reg(); |
| 2020 | 2054 |
| 2021 __ TraceSimMsg("CloneContextInstr"); | 2055 __ TraceSimMsg("CloneContextInstr"); |
| 2022 | 2056 |
| 2023 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 2057 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
| 2024 __ LoadObject(TMP1, Object::ZoneHandle()); // Make room for the result. | 2058 __ LoadObject(TMP, Object::ZoneHandle()); // Make room for the result. |
| 2025 __ sw(TMP1, Address(SP, 1 * kWordSize)); | 2059 __ sw(TMP, Address(SP, 1 * kWordSize)); |
| 2026 __ sw(context_value, Address(SP, 0 * kWordSize)); | 2060 __ sw(context_value, Address(SP, 0 * kWordSize)); |
| 2027 | 2061 |
| 2028 compiler->GenerateRuntimeCall(token_pos(), | 2062 compiler->GenerateRuntimeCall(token_pos(), |
| 2029 deopt_id(), | 2063 deopt_id(), |
| 2030 kCloneContextRuntimeEntry, | 2064 kCloneContextRuntimeEntry, |
| 2031 1, | 2065 1, |
| 2032 locs()); | 2066 locs()); |
| 2033 __ lw(result, Address(SP, 1 * kWordSize)); // Get result (cloned context). | 2067 __ lw(result, Address(SP, 1 * kWordSize)); // Get result (cloned context). |
| 2034 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 2068 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
| 2035 } | 2069 } |
| 2036 | 2070 |
| 2037 | 2071 |
| 2038 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary() const { | 2072 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary() const { |
| 2039 UNREACHABLE(); | 2073 UNREACHABLE(); |
| 2040 return NULL; | 2074 return NULL; |
| 2041 } | 2075 } |
| 2042 | 2076 |
| 2043 | 2077 |
| 2044 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2078 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2045 __ Bind(compiler->GetJumpLabel(this)); | 2079 __ Bind(compiler->GetJumpLabel(this)); |
| 2046 compiler->AddExceptionHandler(catch_try_index(), | 2080 compiler->AddExceptionHandler(catch_try_index(), |
| 2047 try_index(), | 2081 try_index(), |
| 2048 compiler->assembler()->CodeSize(), | 2082 compiler->assembler()->CodeSize(), |
| 2049 catch_handler_types_, | 2083 catch_handler_types_, |
| 2050 needs_stacktrace()); | 2084 needs_stacktrace()); |
| 2051 // Restore pool pointer. | 2085 // Restore pool pointer. |
| 2052 __ GetNextPC(CMPRES, TMP); | 2086 __ GetNextPC(CMPRES1, TMP); |
| 2053 const intptr_t object_pool_pc_dist = | 2087 const intptr_t object_pool_pc_dist = |
| 2054 Instructions::HeaderSize() - Instructions::object_pool_offset() + | 2088 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
| 2055 compiler->assembler()->CodeSize() - 1 * Instr::kInstrSize; | 2089 compiler->assembler()->CodeSize() - 1 * Instr::kInstrSize; |
| 2056 __ LoadFromOffset(PP, CMPRES, -object_pool_pc_dist); | 2090 __ LoadFromOffset(PP, CMPRES1, -object_pool_pc_dist); |
| 2057 | 2091 |
| 2058 if (HasParallelMove()) { | 2092 if (HasParallelMove()) { |
| 2059 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 2093 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
| 2060 } | 2094 } |
| 2061 | 2095 |
| 2062 // Restore SP from FP as we are coming from a throw and the code for | 2096 // Restore SP from FP as we are coming from a throw and the code for |
| 2063 // popping arguments has not been run. | 2097 // popping arguments has not been run. |
| 2064 const intptr_t fp_sp_dist = | 2098 const intptr_t fp_sp_dist = |
| 2065 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 2099 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
| 2066 ASSERT(fp_sp_dist <= 0); | 2100 ASSERT(fp_sp_dist <= 0); |
| (...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2122 private: | 2156 private: |
| 2123 CheckStackOverflowInstr* instruction_; | 2157 CheckStackOverflowInstr* instruction_; |
| 2124 }; | 2158 }; |
| 2125 | 2159 |
| 2126 | 2160 |
| 2127 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2161 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2128 __ TraceSimMsg("CheckStackOverflowInstr"); | 2162 __ TraceSimMsg("CheckStackOverflowInstr"); |
| 2129 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); | 2163 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); |
| 2130 compiler->AddSlowPathCode(slow_path); | 2164 compiler->AddSlowPathCode(slow_path); |
| 2131 | 2165 |
| 2132 __ LoadImmediate(TMP1, Isolate::Current()->stack_limit_address()); | 2166 __ LoadImmediate(TMP, Isolate::Current()->stack_limit_address()); |
| 2133 | 2167 |
| 2134 __ lw(CMPRES1, Address(TMP1)); | 2168 __ lw(CMPRES1, Address(TMP)); |
| 2135 __ BranchUnsignedLessEqual(SP, CMPRES1, slow_path->entry_label()); | 2169 __ BranchUnsignedLessEqual(SP, CMPRES1, slow_path->entry_label()); |
| 2136 if (compiler->CanOSRFunction() && in_loop()) { | 2170 if (compiler->CanOSRFunction() && in_loop()) { |
| 2137 Register temp = locs()->temp(0).reg(); | 2171 Register temp = locs()->temp(0).reg(); |
| 2138 // In unoptimized code check the usage counter to trigger OSR at loop | 2172 // In unoptimized code check the usage counter to trigger OSR at loop |
| 2139 // stack checks. Use progressively higher thresholds for more deeply | 2173 // stack checks. Use progressively higher thresholds for more deeply |
| 2140 // nested loops to attempt to hit outer loops with OSR when possible. | 2174 // nested loops to attempt to hit outer loops with OSR when possible. |
| 2141 __ LoadObject(temp, compiler->parsed_function().function()); | 2175 __ LoadObject(temp, compiler->parsed_function().function()); |
| 2142 intptr_t threshold = | 2176 intptr_t threshold = |
| 2143 FLAG_optimization_counter_threshold * (loop_depth() + 1); | 2177 FLAG_optimization_counter_threshold * (loop_depth() + 1); |
| 2144 __ lw(temp, FieldAddress(temp, Function::usage_counter_offset())); | 2178 __ lw(temp, FieldAddress(temp, Function::usage_counter_offset())); |
| (...skipping 29 matching lines...) Expand all Loading... |
| 2174 // of constant propagation, inlining, etc. | 2208 // of constant propagation, inlining, etc. |
| 2175 if ((value >= kCountLimit) && is_truncating) { | 2209 if ((value >= kCountLimit) && is_truncating) { |
| 2176 __ mov(result, ZR); | 2210 __ mov(result, ZR); |
| 2177 } else { | 2211 } else { |
| 2178 // Result is Mint or exception. | 2212 // Result is Mint or exception. |
| 2179 __ b(deopt); | 2213 __ b(deopt); |
| 2180 } | 2214 } |
| 2181 } else { | 2215 } else { |
| 2182 if (!is_truncating) { | 2216 if (!is_truncating) { |
| 2183 // Check for overflow (preserve left). | 2217 // Check for overflow (preserve left). |
| 2184 __ sll(TMP1, left, value); | 2218 __ sll(TMP, left, value); |
| 2185 __ sra(CMPRES1, TMP1, value); | 2219 __ sra(CMPRES1, TMP, value); |
| 2186 __ bne(CMPRES1, left, deopt); // Overflow. | 2220 __ bne(CMPRES1, left, deopt); // Overflow. |
| 2187 } | 2221 } |
| 2188 // Shift for result now we know there is no overflow. | 2222 // Shift for result now we know there is no overflow. |
| 2189 __ sll(result, left, value); | 2223 __ sll(result, left, value); |
| 2190 } | 2224 } |
| 2191 return; | 2225 return; |
| 2192 } | 2226 } |
| 2193 | 2227 |
| 2194 // Right (locs.in(1)) is not constant. | 2228 // Right (locs.in(1)) is not constant. |
| 2195 Register right = locs.in(1).reg(); | 2229 Register right = locs.in(1).reg(); |
| (...skipping 29 matching lines...) Expand all Loading... |
| 2225 if (right_needs_check) { | 2259 if (right_needs_check) { |
| 2226 const bool right_may_be_negative = | 2260 const bool right_may_be_negative = |
| 2227 (right_range == NULL) || | 2261 (right_range == NULL) || |
| 2228 !right_range->IsWithin(0, RangeBoundary::kPlusInfinity); | 2262 !right_range->IsWithin(0, RangeBoundary::kPlusInfinity); |
| 2229 if (right_may_be_negative) { | 2263 if (right_may_be_negative) { |
| 2230 ASSERT(shift_left->CanDeoptimize()); | 2264 ASSERT(shift_left->CanDeoptimize()); |
| 2231 __ bltz(right, deopt); | 2265 __ bltz(right, deopt); |
| 2232 } | 2266 } |
| 2233 Label done, is_not_zero; | 2267 Label done, is_not_zero; |
| 2234 | 2268 |
| 2235 __ sltiu(CMPRES, | 2269 __ sltiu(CMPRES1, |
| 2236 right, Immediate(reinterpret_cast<int32_t>(Smi::New(Smi::kBits)))); | 2270 right, Immediate(reinterpret_cast<int32_t>(Smi::New(Smi::kBits)))); |
| 2237 __ movz(result, ZR, CMPRES); // result = right >= kBits ? 0 : result. | 2271 __ movz(result, ZR, CMPRES1); // result = right >= kBits ? 0 : result. |
| 2238 __ sra(TMP1, right, kSmiTagSize); | 2272 __ sra(TMP, right, kSmiTagSize); |
| 2239 __ sllv(TMP1, left, TMP1); | 2273 __ sllv(TMP, left, TMP); |
| 2240 // result = right < kBits ? left << right : result. | 2274 // result = right < kBits ? left << right : result. |
| 2241 __ movn(result, TMP1, CMPRES); | 2275 __ movn(result, TMP, CMPRES1); |
| 2242 } else { | 2276 } else { |
| 2243 __ sra(TMP, right, kSmiTagSize); | 2277 __ sra(TMP, right, kSmiTagSize); |
| 2244 __ sllv(result, left, TMP); | 2278 __ sllv(result, left, TMP); |
| 2245 } | 2279 } |
| 2246 } else { | 2280 } else { |
| 2247 if (right_needs_check) { | 2281 if (right_needs_check) { |
| 2248 ASSERT(shift_left->CanDeoptimize()); | 2282 ASSERT(shift_left->CanDeoptimize()); |
| 2249 __ BranchUnsignedGreaterEqual( | 2283 __ BranchUnsignedGreaterEqual( |
| 2250 right, reinterpret_cast<int32_t>(Smi::New(Smi::kBits)), deopt); | 2284 right, reinterpret_cast<int32_t>(Smi::New(Smi::kBits)), deopt); |
| 2251 } | 2285 } |
| (...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2314 if (locs()->in(1).IsConstant()) { | 2348 if (locs()->in(1).IsConstant()) { |
| 2315 const Object& constant = locs()->in(1).constant(); | 2349 const Object& constant = locs()->in(1).constant(); |
| 2316 ASSERT(constant.IsSmi()); | 2350 ASSERT(constant.IsSmi()); |
| 2317 int32_t imm = reinterpret_cast<int32_t>(constant.raw()); | 2351 int32_t imm = reinterpret_cast<int32_t>(constant.raw()); |
| 2318 switch (op_kind()) { | 2352 switch (op_kind()) { |
| 2319 case Token::kSUB: { | 2353 case Token::kSUB: { |
| 2320 __ TraceSimMsg("kSUB imm"); | 2354 __ TraceSimMsg("kSUB imm"); |
| 2321 if (deopt == NULL) { | 2355 if (deopt == NULL) { |
| 2322 __ AddImmediate(result, left, -imm); | 2356 __ AddImmediate(result, left, -imm); |
| 2323 } else { | 2357 } else { |
| 2324 __ SubImmediateDetectOverflow(result, left, imm, CMPRES); | 2358 __ SubImmediateDetectOverflow(result, left, imm, CMPRES1); |
| 2325 __ bltz(CMPRES, deopt); | 2359 __ bltz(CMPRES1, deopt); |
| 2326 } | 2360 } |
| 2327 break; | 2361 break; |
| 2328 } | 2362 } |
| 2329 case Token::kADD: { | 2363 case Token::kADD: { |
| 2330 if (deopt == NULL) { | 2364 if (deopt == NULL) { |
| 2331 __ AddImmediate(result, left, imm); | 2365 __ AddImmediate(result, left, imm); |
| 2332 } else { | 2366 } else { |
| 2333 Register temp = locs()->temp(0).reg(); | 2367 Register temp = locs()->temp(0).reg(); |
| 2334 __ AddImmediateDetectOverflow(result, left, imm, CMPRES, temp); | 2368 __ AddImmediateDetectOverflow(result, left, imm, CMPRES1, temp); |
| 2335 __ bltz(CMPRES, deopt); | 2369 __ bltz(CMPRES1, deopt); |
| 2336 } | 2370 } |
| 2337 break; | 2371 break; |
| 2338 } | 2372 } |
| 2339 case Token::kMUL: { | 2373 case Token::kMUL: { |
| 2340 // Keep left value tagged and untag right value. | 2374 // Keep left value tagged and untag right value. |
| 2341 const intptr_t value = Smi::Cast(constant).Value(); | 2375 const intptr_t value = Smi::Cast(constant).Value(); |
| 2342 if (deopt == NULL) { | 2376 if (deopt == NULL) { |
| 2343 if (value == 2) { | 2377 if (value == 2) { |
| 2344 __ sll(result, left, 1); | 2378 __ sll(result, left, 1); |
| 2345 } else { | 2379 } else { |
| 2346 __ LoadImmediate(TMP1, value); | 2380 __ LoadImmediate(TMP, value); |
| 2347 __ mult(left, TMP1); | 2381 __ mult(left, TMP); |
| 2348 __ mflo(result); | 2382 __ mflo(result); |
| 2349 } | 2383 } |
| 2350 } else { | 2384 } else { |
| 2351 if (value == 2) { | 2385 if (value == 2) { |
| 2352 __ sra(CMPRES2, left, 31); // CMPRES2 = sign of left. | 2386 __ sra(CMPRES2, left, 31); // CMPRES2 = sign of left. |
| 2353 __ sll(result, left, 1); | 2387 __ sll(result, left, 1); |
| 2354 } else { | 2388 } else { |
| 2355 __ LoadImmediate(TMP1, value); | 2389 __ LoadImmediate(TMP, value); |
| 2356 __ mult(left, TMP1); | 2390 __ mult(left, TMP); |
| 2357 __ mflo(result); | 2391 __ mflo(result); |
| 2358 __ mfhi(CMPRES2); | 2392 __ mfhi(CMPRES2); |
| 2359 } | 2393 } |
| 2360 __ sra(CMPRES, result, 31); | 2394 __ sra(CMPRES1, result, 31); |
| 2361 __ bne(CMPRES1, CMPRES2, deopt); | 2395 __ bne(CMPRES1, CMPRES2, deopt); |
| 2362 } | 2396 } |
| 2363 break; | 2397 break; |
| 2364 } | 2398 } |
| 2365 case Token::kTRUNCDIV: { | 2399 case Token::kTRUNCDIV: { |
| 2366 const intptr_t value = Smi::Cast(constant).Value(); | 2400 const intptr_t value = Smi::Cast(constant).Value(); |
| 2367 if (value == 1) { | 2401 if (value == 1) { |
| 2368 if (result != left) { | 2402 if (result != left) { |
| 2369 __ mov(result, left); | 2403 __ mov(result, left); |
| 2370 } | 2404 } |
| (...skipping 20 matching lines...) Expand all Loading... |
| 2391 __ subu(result, ZR, result); | 2425 __ subu(result, ZR, result); |
| 2392 } | 2426 } |
| 2393 __ SmiTag(result); | 2427 __ SmiTag(result); |
| 2394 break; | 2428 break; |
| 2395 } | 2429 } |
| 2396 case Token::kBIT_AND: { | 2430 case Token::kBIT_AND: { |
| 2397 // No overflow check. | 2431 // No overflow check. |
| 2398 if (Utils::IsUint(kImmBits, imm)) { | 2432 if (Utils::IsUint(kImmBits, imm)) { |
| 2399 __ andi(result, left, Immediate(imm)); | 2433 __ andi(result, left, Immediate(imm)); |
| 2400 } else { | 2434 } else { |
| 2401 __ LoadImmediate(TMP1, imm); | 2435 __ LoadImmediate(TMP, imm); |
| 2402 __ and_(result, left, TMP1); | 2436 __ and_(result, left, TMP); |
| 2403 } | 2437 } |
| 2404 break; | 2438 break; |
| 2405 } | 2439 } |
| 2406 case Token::kBIT_OR: { | 2440 case Token::kBIT_OR: { |
| 2407 // No overflow check. | 2441 // No overflow check. |
| 2408 if (Utils::IsUint(kImmBits, imm)) { | 2442 if (Utils::IsUint(kImmBits, imm)) { |
| 2409 __ ori(result, left, Immediate(imm)); | 2443 __ ori(result, left, Immediate(imm)); |
| 2410 } else { | 2444 } else { |
| 2411 __ LoadImmediate(TMP1, imm); | 2445 __ LoadImmediate(TMP, imm); |
| 2412 __ or_(result, left, TMP1); | 2446 __ or_(result, left, TMP); |
| 2413 } | 2447 } |
| 2414 break; | 2448 break; |
| 2415 } | 2449 } |
| 2416 case Token::kBIT_XOR: { | 2450 case Token::kBIT_XOR: { |
| 2417 // No overflow check. | 2451 // No overflow check. |
| 2418 if (Utils::IsUint(kImmBits, imm)) { | 2452 if (Utils::IsUint(kImmBits, imm)) { |
| 2419 __ xori(result, left, Immediate(imm)); | 2453 __ xori(result, left, Immediate(imm)); |
| 2420 } else { | 2454 } else { |
| 2421 __ LoadImmediate(TMP1, imm); | 2455 __ LoadImmediate(TMP, imm); |
| 2422 __ xor_(result, left, TMP1); | 2456 __ xor_(result, left, TMP); |
| 2423 } | 2457 } |
| 2424 break; | 2458 break; |
| 2425 } | 2459 } |
| 2426 case Token::kSHR: { | 2460 case Token::kSHR: { |
| 2427 // sarl operation masks the count to 5 bits. | 2461 // sarl operation masks the count to 5 bits. |
| 2428 const intptr_t kCountLimit = 0x1F; | 2462 const intptr_t kCountLimit = 0x1F; |
| 2429 intptr_t value = Smi::Cast(constant).Value(); | 2463 intptr_t value = Smi::Cast(constant).Value(); |
| 2430 | 2464 |
| 2431 __ TraceSimMsg("kSHR"); | 2465 __ TraceSimMsg("kSHR"); |
| 2432 | 2466 |
| (...skipping 24 matching lines...) Expand all Loading... |
| 2457 return; | 2491 return; |
| 2458 } | 2492 } |
| 2459 | 2493 |
| 2460 Register right = locs()->in(1).reg(); | 2494 Register right = locs()->in(1).reg(); |
| 2461 switch (op_kind()) { | 2495 switch (op_kind()) { |
| 2462 case Token::kADD: { | 2496 case Token::kADD: { |
| 2463 if (deopt == NULL) { | 2497 if (deopt == NULL) { |
| 2464 __ addu(result, left, right); | 2498 __ addu(result, left, right); |
| 2465 } else { | 2499 } else { |
| 2466 Register temp = locs()->temp(0).reg(); | 2500 Register temp = locs()->temp(0).reg(); |
| 2467 __ AdduDetectOverflow(result, left, right, CMPRES, temp); | 2501 __ AdduDetectOverflow(result, left, right, CMPRES1, temp); |
| 2468 __ bltz(CMPRES, deopt); | 2502 __ bltz(CMPRES1, deopt); |
| 2469 } | 2503 } |
| 2470 break; | 2504 break; |
| 2471 } | 2505 } |
| 2472 case Token::kSUB: { | 2506 case Token::kSUB: { |
| 2473 __ TraceSimMsg("kSUB"); | 2507 __ TraceSimMsg("kSUB"); |
| 2474 if (deopt == NULL) { | 2508 if (deopt == NULL) { |
| 2475 __ subu(result, left, right); | 2509 __ subu(result, left, right); |
| 2476 } else { | 2510 } else { |
| 2477 __ SubuDetectOverflow(result, left, right, CMPRES); | 2511 __ SubuDetectOverflow(result, left, right, CMPRES1); |
| 2478 __ bltz(CMPRES, deopt); | 2512 __ bltz(CMPRES1, deopt); |
| 2479 } | 2513 } |
| 2480 break; | 2514 break; |
| 2481 } | 2515 } |
| 2482 case Token::kMUL: { | 2516 case Token::kMUL: { |
| 2483 __ TraceSimMsg("kMUL"); | 2517 __ TraceSimMsg("kMUL"); |
| 2484 __ sra(TMP, left, kSmiTagSize); | 2518 __ sra(TMP, left, kSmiTagSize); |
| 2485 __ mult(TMP, right); | 2519 __ mult(TMP, right); |
| 2486 __ mflo(result); | 2520 __ mflo(result); |
| 2487 if (deopt != NULL) { | 2521 if (deopt != NULL) { |
| 2488 __ mfhi(CMPRES2); | 2522 __ mfhi(CMPRES2); |
| (...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2581 } | 2615 } |
| 2582 | 2616 |
| 2583 | 2617 |
| 2584 void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2618 void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 2585 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptBinaryDoubleOp); | 2619 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptBinaryDoubleOp); |
| 2586 intptr_t left_cid = left()->Type()->ToCid(); | 2620 intptr_t left_cid = left()->Type()->ToCid(); |
| 2587 intptr_t right_cid = right()->Type()->ToCid(); | 2621 intptr_t right_cid = right()->Type()->ToCid(); |
| 2588 Register left = locs()->in(0).reg(); | 2622 Register left = locs()->in(0).reg(); |
| 2589 Register right = locs()->in(1).reg(); | 2623 Register right = locs()->in(1).reg(); |
| 2590 if (left_cid == kSmiCid) { | 2624 if (left_cid == kSmiCid) { |
| 2591 __ andi(CMPRES, right, Immediate(kSmiTagMask)); | 2625 __ andi(CMPRES1, right, Immediate(kSmiTagMask)); |
| 2592 } else if (right_cid == kSmiCid) { | 2626 } else if (right_cid == kSmiCid) { |
| 2593 __ andi(CMPRES, left, Immediate(kSmiTagMask)); | 2627 __ andi(CMPRES1, left, Immediate(kSmiTagMask)); |
| 2594 } else { | 2628 } else { |
| 2595 __ or_(TMP, left, right); | 2629 __ or_(TMP, left, right); |
| 2596 __ andi(CMPRES, TMP, Immediate(kSmiTagMask)); | 2630 __ andi(CMPRES1, TMP, Immediate(kSmiTagMask)); |
| 2597 } | 2631 } |
| 2598 __ beq(CMPRES, ZR, deopt); | 2632 __ beq(CMPRES1, ZR, deopt); |
| 2599 } | 2633 } |
| 2600 | 2634 |
| 2601 | 2635 |
| 2602 LocationSummary* BoxDoubleInstr::MakeLocationSummary() const { | 2636 LocationSummary* BoxDoubleInstr::MakeLocationSummary() const { |
| 2603 const intptr_t kNumInputs = 1; | 2637 const intptr_t kNumInputs = 1; |
| 2604 const intptr_t kNumTemps = 0; | 2638 const intptr_t kNumTemps = 0; |
| 2605 LocationSummary* summary = | 2639 LocationSummary* summary = |
| 2606 new LocationSummary(kNumInputs, | 2640 new LocationSummary(kNumInputs, |
| 2607 kNumTemps, | 2641 kNumTemps, |
| 2608 LocationSummary::kCallOnSlowPath); | 2642 LocationSummary::kCallOnSlowPath); |
| (...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 2684 if (value_cid == kDoubleCid) { | 2718 if (value_cid == kDoubleCid) { |
| 2685 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); | 2719 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); |
| 2686 } else if (value_cid == kSmiCid) { | 2720 } else if (value_cid == kSmiCid) { |
| 2687 __ SmiUntag(value); // Untag input before conversion. | 2721 __ SmiUntag(value); // Untag input before conversion. |
| 2688 __ mtc1(value, STMP1); | 2722 __ mtc1(value, STMP1); |
| 2689 __ cvtdw(result, STMP1); | 2723 __ cvtdw(result, STMP1); |
| 2690 } else { | 2724 } else { |
| 2691 Label* deopt = compiler->AddDeoptStub(deopt_id_, kDeoptBinaryDoubleOp); | 2725 Label* deopt = compiler->AddDeoptStub(deopt_id_, kDeoptBinaryDoubleOp); |
| 2692 Label is_smi, done; | 2726 Label is_smi, done; |
| 2693 | 2727 |
| 2694 __ andi(CMPRES, value, Immediate(kSmiTagMask)); | 2728 __ andi(CMPRES1, value, Immediate(kSmiTagMask)); |
| 2695 __ beq(CMPRES, ZR, &is_smi); | 2729 __ beq(CMPRES1, ZR, &is_smi); |
| 2696 __ LoadClassId(CMPRES1, value); | 2730 __ LoadClassId(CMPRES1, value); |
| 2697 __ BranchNotEqual(CMPRES1, kDoubleCid, deopt); | 2731 __ BranchNotEqual(CMPRES1, kDoubleCid, deopt); |
| 2698 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); | 2732 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); |
| 2699 __ b(&done); | 2733 __ b(&done); |
| 2700 __ Bind(&is_smi); | 2734 __ Bind(&is_smi); |
| 2701 // TODO(regis): Why do we preserve value here but not above? | 2735 // TODO(regis): Why do we preserve value here but not above? |
| 2702 __ sra(TMP, value, 1); | 2736 __ sra(TMP, value, 1); |
| 2703 __ mtc1(TMP, STMP1); | 2737 __ mtc1(TMP, STMP1); |
| 2704 __ cvtdw(result, STMP1); | 2738 __ cvtdw(result, STMP1); |
| 2705 __ Bind(&done); | 2739 __ Bind(&done); |
| (...skipping 437 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3143 } | 3177 } |
| 3144 | 3178 |
| 3145 | 3179 |
| 3146 void UnarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3180 void UnarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3147 Register value = locs()->in(0).reg(); | 3181 Register value = locs()->in(0).reg(); |
| 3148 Register result = locs()->out().reg(); | 3182 Register result = locs()->out().reg(); |
| 3149 switch (op_kind()) { | 3183 switch (op_kind()) { |
| 3150 case Token::kNEGATE: { | 3184 case Token::kNEGATE: { |
| 3151 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 3185 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
| 3152 kDeoptUnaryOp); | 3186 kDeoptUnaryOp); |
| 3153 __ SubuDetectOverflow(result, ZR, value, CMPRES); | 3187 __ SubuDetectOverflow(result, ZR, value, CMPRES1); |
| 3154 __ bltz(CMPRES, deopt); | 3188 __ bltz(CMPRES1, deopt); |
| 3155 break; | 3189 break; |
| 3156 } | 3190 } |
| 3157 case Token::kBIT_NOT: | 3191 case Token::kBIT_NOT: |
| 3158 __ nor(result, value, ZR); | 3192 __ nor(result, value, ZR); |
| 3159 __ addiu(result, result, Immediate(-1)); // Remove inverted smi-tag. | 3193 __ addiu(result, result, Immediate(-1)); // Remove inverted smi-tag. |
| 3160 break; | 3194 break; |
| 3161 default: | 3195 default: |
| 3162 UNREACHABLE(); | 3196 UNREACHABLE(); |
| 3163 } | 3197 } |
| 3164 } | 3198 } |
| (...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3226 ASSERT(result == V0); | 3260 ASSERT(result == V0); |
| 3227 ASSERT(result != value_obj); | 3261 ASSERT(result != value_obj); |
| 3228 __ LoadDFromOffset(DTMP, value_obj, Double::value_offset() - kHeapObjectTag); | 3262 __ LoadDFromOffset(DTMP, value_obj, Double::value_offset() - kHeapObjectTag); |
| 3229 __ cvtwd(STMP1, DTMP); | 3263 __ cvtwd(STMP1, DTMP); |
| 3230 __ mfc1(result, STMP1); | 3264 __ mfc1(result, STMP1); |
| 3231 | 3265 |
| 3232 // Overflow is signaled with minint. | 3266 // Overflow is signaled with minint. |
| 3233 Label do_call, done; | 3267 Label do_call, done; |
| 3234 // Check for overflow and that it fits into Smi. | 3268 // Check for overflow and that it fits into Smi. |
| 3235 __ LoadImmediate(TMP, 0xC0000000); | 3269 __ LoadImmediate(TMP, 0xC0000000); |
| 3236 __ subu(CMPRES, result, TMP); | 3270 __ subu(CMPRES1, result, TMP); |
| 3237 __ bltz(CMPRES, &do_call); | 3271 __ bltz(CMPRES1, &do_call); |
| 3238 __ SmiTag(result); | 3272 __ SmiTag(result); |
| 3239 __ b(&done); | 3273 __ b(&done); |
| 3240 __ Bind(&do_call); | 3274 __ Bind(&do_call); |
| 3241 __ Push(value_obj); | 3275 __ Push(value_obj); |
| 3242 ASSERT(instance_call()->HasICData()); | 3276 ASSERT(instance_call()->HasICData()); |
| 3243 const ICData& ic_data = *instance_call()->ic_data(); | 3277 const ICData& ic_data = *instance_call()->ic_data(); |
| 3244 ASSERT((ic_data.NumberOfChecks() == 1)); | 3278 ASSERT((ic_data.NumberOfChecks() == 1)); |
| 3245 const Function& target = Function::ZoneHandle(ic_data.GetTargetAt(0)); | 3279 const Function& target = Function::ZoneHandle(ic_data.GetTargetAt(0)); |
| 3246 | 3280 |
| 3247 const intptr_t kNumberOfArguments = 1; | 3281 const intptr_t kNumberOfArguments = 1; |
| (...skipping 20 matching lines...) Expand all Loading... |
| 3268 | 3302 |
| 3269 void DoubleToSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3303 void DoubleToSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3270 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptDoubleToSmi); | 3304 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptDoubleToSmi); |
| 3271 Register result = locs()->out().reg(); | 3305 Register result = locs()->out().reg(); |
| 3272 DRegister value = locs()->in(0).fpu_reg(); | 3306 DRegister value = locs()->in(0).fpu_reg(); |
| 3273 __ cvtwd(STMP1, value); | 3307 __ cvtwd(STMP1, value); |
| 3274 __ mfc1(result, STMP1); | 3308 __ mfc1(result, STMP1); |
| 3275 | 3309 |
| 3276 // Check for overflow and that it fits into Smi. | 3310 // Check for overflow and that it fits into Smi. |
| 3277 __ LoadImmediate(TMP, 0xC0000000); | 3311 __ LoadImmediate(TMP, 0xC0000000); |
| 3278 __ subu(CMPRES, result, TMP); | 3312 __ subu(CMPRES1, result, TMP); |
| 3279 __ bltz(CMPRES, deopt); | 3313 __ bltz(CMPRES1, deopt); |
| 3280 __ SmiTag(result); | 3314 __ SmiTag(result); |
| 3281 } | 3315 } |
| 3282 | 3316 |
| 3283 | 3317 |
| 3284 LocationSummary* DoubleToDoubleInstr::MakeLocationSummary() const { | 3318 LocationSummary* DoubleToDoubleInstr::MakeLocationSummary() const { |
| 3285 UNIMPLEMENTED(); | 3319 UNIMPLEMENTED(); |
| 3286 return NULL; | 3320 return NULL; |
| 3287 } | 3321 } |
| 3288 | 3322 |
| 3289 | 3323 |
| (...skipping 143 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3433 | 3467 |
| 3434 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || | 3468 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || |
| 3435 (unary_checks().NumberOfChecks() > 1)); | 3469 (unary_checks().NumberOfChecks() > 1)); |
| 3436 Register value = locs()->in(0).reg(); | 3470 Register value = locs()->in(0).reg(); |
| 3437 Register temp = locs()->temp(0).reg(); | 3471 Register temp = locs()->temp(0).reg(); |
| 3438 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 3472 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
| 3439 kDeoptCheckClass); | 3473 kDeoptCheckClass); |
| 3440 Label is_ok; | 3474 Label is_ok; |
| 3441 intptr_t cix = 0; | 3475 intptr_t cix = 0; |
| 3442 if (unary_checks().GetReceiverClassIdAt(cix) == kSmiCid) { | 3476 if (unary_checks().GetReceiverClassIdAt(cix) == kSmiCid) { |
| 3443 __ andi(CMPRES, value, Immediate(kSmiTagMask)); | 3477 __ andi(CMPRES1, value, Immediate(kSmiTagMask)); |
| 3444 __ beq(CMPRES, ZR, &is_ok); | 3478 __ beq(CMPRES1, ZR, &is_ok); |
| 3445 cix++; // Skip first check. | 3479 cix++; // Skip first check. |
| 3446 } else { | 3480 } else { |
| 3447 __ andi(CMPRES, value, Immediate(kSmiTagMask)); | 3481 __ andi(CMPRES1, value, Immediate(kSmiTagMask)); |
| 3448 __ beq(CMPRES, ZR, deopt); | 3482 __ beq(CMPRES1, ZR, deopt); |
| 3449 } | 3483 } |
| 3450 __ LoadClassId(temp, value); | 3484 __ LoadClassId(temp, value); |
| 3451 const intptr_t num_checks = unary_checks().NumberOfChecks(); | 3485 const intptr_t num_checks = unary_checks().NumberOfChecks(); |
| 3452 for (intptr_t i = cix; i < num_checks; i++) { | 3486 for (intptr_t i = cix; i < num_checks; i++) { |
| 3453 ASSERT(unary_checks().GetReceiverClassIdAt(i) != kSmiCid); | 3487 ASSERT(unary_checks().GetReceiverClassIdAt(i) != kSmiCid); |
| 3454 __ LoadImmediate(TMP1, unary_checks().GetReceiverClassIdAt(i)); | 3488 __ LoadImmediate(TMP, unary_checks().GetReceiverClassIdAt(i)); |
| 3455 __ subu(CMPRES, temp, TMP1); | 3489 __ subu(CMPRES1, temp, TMP); |
| 3456 if (i == (num_checks - 1)) { | 3490 if (i == (num_checks - 1)) { |
| 3457 __ bne(CMPRES, ZR, deopt); | 3491 __ bne(CMPRES1, ZR, deopt); |
| 3458 } else { | 3492 } else { |
| 3459 __ beq(CMPRES, ZR, &is_ok); | 3493 __ beq(CMPRES1, ZR, &is_ok); |
| 3460 } | 3494 } |
| 3461 } | 3495 } |
| 3462 __ Bind(&is_ok); | 3496 __ Bind(&is_ok); |
| 3463 } | 3497 } |
| 3464 | 3498 |
| 3465 | 3499 |
| 3466 LocationSummary* CheckSmiInstr::MakeLocationSummary() const { | 3500 LocationSummary* CheckSmiInstr::MakeLocationSummary() const { |
| 3467 const intptr_t kNumInputs = 1; | 3501 const intptr_t kNumInputs = 1; |
| 3468 const intptr_t kNumTemps = 0; | 3502 const intptr_t kNumTemps = 0; |
| 3469 LocationSummary* summary = | 3503 LocationSummary* summary = |
| (...skipping 210 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3680 bool value) { | 3714 bool value) { |
| 3681 __ TraceSimMsg("ControlInstruction::EmitBranchOnValue"); | 3715 __ TraceSimMsg("ControlInstruction::EmitBranchOnValue"); |
| 3682 if (value && !compiler->CanFallThroughTo(true_successor())) { | 3716 if (value && !compiler->CanFallThroughTo(true_successor())) { |
| 3683 __ b(compiler->GetJumpLabel(true_successor())); | 3717 __ b(compiler->GetJumpLabel(true_successor())); |
| 3684 } else if (!value && !compiler->CanFallThroughTo(false_successor())) { | 3718 } else if (!value && !compiler->CanFallThroughTo(false_successor())) { |
| 3685 __ b(compiler->GetJumpLabel(false_successor())); | 3719 __ b(compiler->GetJumpLabel(false_successor())); |
| 3686 } | 3720 } |
| 3687 } | 3721 } |
| 3688 | 3722 |
| 3689 | 3723 |
| 3690 // The comparison result is in CMPRES. | 3724 // The comparison result is in CMPRES1. |
| 3691 void ControlInstruction::EmitBranchOnCondition(FlowGraphCompiler* compiler, | 3725 void ControlInstruction::EmitBranchOnCondition(FlowGraphCompiler* compiler, |
| 3692 Condition true_condition) { | 3726 Condition true_condition) { |
| 3693 __ TraceSimMsg("ControlInstruction::EmitBranchOnCondition"); | 3727 __ TraceSimMsg("ControlInstruction::EmitBranchOnCondition"); |
| 3694 if (compiler->CanFallThroughTo(false_successor())) { | 3728 if (compiler->CanFallThroughTo(false_successor())) { |
| 3695 // If the next block is the false successor, fall through to it. | 3729 // If the next block is the false successor, fall through to it. |
| 3696 Label* label = compiler->GetJumpLabel(true_successor()); | 3730 Label* label = compiler->GetJumpLabel(true_successor()); |
| 3697 EmitBranchAfterCompare(compiler, true_condition, label); | 3731 EmitBranchAfterCompare(compiler, true_condition, label); |
| 3698 } else { | 3732 } else { |
| 3699 // If the next block is not the false successor, branch to it. | 3733 // If the next block is not the false successor, branch to it. |
| 3700 Condition false_condition = NegateCondition(true_condition); | 3734 Condition false_condition = NegateCondition(true_condition); |
| (...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3821 Location::RequiresRegister(), | 3855 Location::RequiresRegister(), |
| 3822 LocationSummary::kNoCall); | 3856 LocationSummary::kNoCall); |
| 3823 } | 3857 } |
| 3824 | 3858 |
| 3825 | 3859 |
| 3826 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3860 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| 3827 Register value = locs()->in(0).reg(); | 3861 Register value = locs()->in(0).reg(); |
| 3828 Register result = locs()->out().reg(); | 3862 Register result = locs()->out().reg(); |
| 3829 | 3863 |
| 3830 __ LoadObject(result, Bool::True()); | 3864 __ LoadObject(result, Bool::True()); |
| 3831 __ LoadObject(TMP1, Bool::False()); | 3865 __ LoadObject(TMP, Bool::False()); |
| 3832 __ subu(CMPRES, value, result); | 3866 __ subu(CMPRES1, value, result); |
| 3833 __ movz(result, TMP1, CMPRES); // If value is True, move False into result. | 3867 __ movz(result, TMP, CMPRES1); // If value is True, move False into result. |
| 3834 } | 3868 } |
| 3835 | 3869 |
| 3836 | 3870 |
| 3837 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { | 3871 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { |
| 3838 const intptr_t kNumInputs = 2; | 3872 const intptr_t kNumInputs = 2; |
| 3839 const intptr_t kNumTemps = 0; | 3873 const intptr_t kNumTemps = 0; |
| 3840 LocationSummary* locs = | 3874 LocationSummary* locs = |
| 3841 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 3875 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
| 3842 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() | 3876 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() |
| 3843 : Location::RequiresRegister()); | 3877 : Location::RequiresRegister()); |
| (...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
| 3893 compiler->GenerateCall(token_pos(), | 3927 compiler->GenerateCall(token_pos(), |
| 3894 &label, | 3928 &label, |
| 3895 PcDescriptors::kOther, | 3929 PcDescriptors::kOther, |
| 3896 locs()); | 3930 locs()); |
| 3897 __ Drop(2); // Discard type arguments and receiver. | 3931 __ Drop(2); // Discard type arguments and receiver. |
| 3898 } | 3932 } |
| 3899 | 3933 |
| 3900 } // namespace dart | 3934 } // namespace dart |
| 3901 | 3935 |
| 3902 #endif // defined TARGET_ARCH_MIPS | 3936 #endif // defined TARGET_ARCH_MIPS |
| OLD | NEW |