OLD | NEW |
---|---|
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
7 | 7 |
8 #include "vm/intermediate_language.h" | 8 #include "vm/intermediate_language.h" |
9 | 9 |
10 #include "vm/dart_entry.h" | 10 #include "vm/dart_entry.h" |
(...skipping 326 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
337 case Token::kGT: return GT; | 337 case Token::kGT: return GT; |
338 case Token::kLTE: return LE; | 338 case Token::kLTE: return LE; |
339 case Token::kGTE: return GE; | 339 case Token::kGTE: return GE; |
340 default: | 340 default: |
341 UNREACHABLE(); | 341 UNREACHABLE(); |
342 return VS; | 342 return VS; |
343 } | 343 } |
344 } | 344 } |
345 | 345 |
346 | 346 |
347 // Branches on condition c assuming comparison results in CMPRES and CMPRES2. | 347 // Branches on condition c assuming comparison results in CMPRES1 and CMPRES2. |
348 static void EmitBranchAfterCompare( | 348 static void EmitBranchAfterCompare( |
349 FlowGraphCompiler* compiler, Condition c, Label* is_true) { | 349 FlowGraphCompiler* compiler, Condition c, Label* is_true) { |
350 switch (c) { | 350 switch (c) { |
351 case EQ: __ beq(CMPRES1, CMPRES2, is_true); break; | 351 case EQ: __ beq(CMPRES1, CMPRES2, is_true); break; |
352 case NE: __ bne(CMPRES1, CMPRES2, is_true); break; | 352 case NE: __ bne(CMPRES1, CMPRES2, is_true); break; |
353 case GT: __ bne(CMPRES2, ZR, is_true); break; | 353 case GT: __ bne(CMPRES2, ZR, is_true); break; |
354 case GE: __ beq(CMPRES1, ZR, is_true); break; | 354 case GE: __ beq(CMPRES1, ZR, is_true); break; |
355 case LT: __ bne(CMPRES1, ZR, is_true); break; | 355 case LT: __ bne(CMPRES1, ZR, is_true); break; |
356 case LE: __ beq(CMPRES2, ZR, is_true); break; | 356 case LE: __ beq(CMPRES2, ZR, is_true); break; |
357 default: | 357 default: |
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
499 return; | 499 return; |
500 } | 500 } |
501 if (operation_cid() == kDoubleCid) { | 501 if (operation_cid() == kDoubleCid) { |
502 EmitDoubleComparisonOp(compiler, *locs(), kind(), branch); | 502 EmitDoubleComparisonOp(compiler, *locs(), kind(), branch); |
503 return; | 503 return; |
504 } | 504 } |
505 UNREACHABLE(); | 505 UNREACHABLE(); |
506 } | 506 } |
507 | 507 |
508 | 508 |
509 LocationSummary* TestSmiInstr::MakeLocationSummary() const { | |
510 const intptr_t kNumInputs = 2; | |
511 const intptr_t kNumTemps = 0; | |
512 LocationSummary* locs = | |
513 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | |
514 locs->set_in(0, Location::RequiresRegister()); | |
515 // Only one input can be a constant operand. The case of two constant | |
516 // operands should be handled by constant propagation. | |
517 locs->set_in(1, Location::RegisterOrConstant(right())); | |
518 return locs; | |
519 } | |
520 | |
521 | |
522 void TestSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | |
523 // Never emitted outside of the BranchInstr. | |
524 UNREACHABLE(); | |
525 } | |
526 | |
527 | |
528 void TestSmiInstr::EmitBranchCode(FlowGraphCompiler* compiler, | |
529 BranchInstr* branch) { | |
530 Condition branch_condition = (kind() == Token::kNE) ? NE : EQ; | |
531 Register left = locs()->in(0).reg(); | |
532 Location right = locs()->in(1); | |
533 if (right.IsConstant()) { | |
534 ASSERT(right.constant().IsSmi()); | |
535 const int32_t imm = | |
536 reinterpret_cast<int32_t>(right.constant().raw()); | |
537 if (Utils::IsUint(kImmBits, imm)) { | |
zra
2013/11/05 18:38:03
I'm adding AndImmediate in https://codereview.chro
Florian Schneider
2013/11/06 12:13:42
Thanks. Done.
| |
538 __ andi(CMPRES1, left, Immediate(imm)); | |
539 } else { | |
540 __ LoadImmediate(TMP, imm); | |
541 __ and_(CMPRES1, left, TMP); | |
542 } | |
543 } else { | |
544 __ and_(CMPRES1, left, right.reg()); | |
545 } | |
546 __ mov(CMPRES2, ZR); | |
547 branch->EmitBranchOnCondition(compiler, branch_condition); | |
548 } | |
549 | |
550 | |
509 LocationSummary* RelationalOpInstr::MakeLocationSummary() const { | 551 LocationSummary* RelationalOpInstr::MakeLocationSummary() const { |
510 const intptr_t kNumInputs = 2; | 552 const intptr_t kNumInputs = 2; |
511 const intptr_t kNumTemps = 0; | 553 const intptr_t kNumTemps = 0; |
512 if (operation_cid() == kMintCid) { | 554 if (operation_cid() == kMintCid) { |
513 const intptr_t kNumTemps = 2; | 555 const intptr_t kNumTemps = 2; |
514 LocationSummary* locs = | 556 LocationSummary* locs = |
515 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 557 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
516 locs->set_in(0, Location::RequiresFpuRegister()); | 558 locs->set_in(0, Location::RequiresFpuRegister()); |
517 locs->set_in(1, Location::RequiresFpuRegister()); | 559 locs->set_in(1, Location::RequiresFpuRegister()); |
518 locs->set_temp(0, Location::RequiresRegister()); | 560 locs->set_temp(0, Location::RequiresRegister()); |
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
641 | 683 |
642 void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 684 void StringFromCharCodeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
643 Register char_code = locs()->in(0).reg(); | 685 Register char_code = locs()->in(0).reg(); |
644 Register result = locs()->out().reg(); | 686 Register result = locs()->out().reg(); |
645 | 687 |
646 __ TraceSimMsg("StringFromCharCodeInstr"); | 688 __ TraceSimMsg("StringFromCharCodeInstr"); |
647 | 689 |
648 __ LoadImmediate(result, | 690 __ LoadImmediate(result, |
649 reinterpret_cast<uword>(Symbols::PredefinedAddress())); | 691 reinterpret_cast<uword>(Symbols::PredefinedAddress())); |
650 __ AddImmediate(result, Symbols::kNullCharCodeSymbolOffset * kWordSize); | 692 __ AddImmediate(result, Symbols::kNullCharCodeSymbolOffset * kWordSize); |
651 __ sll(TMP1, char_code, 1); // Char code is a smi. | 693 __ sll(TMP, char_code, 1); // Char code is a smi. |
652 __ addu(TMP1, TMP1, result); | 694 __ addu(TMP, TMP, result); |
653 __ lw(result, Address(TMP1)); | 695 __ lw(result, Address(TMP)); |
654 } | 696 } |
655 | 697 |
656 | 698 |
657 LocationSummary* StringInterpolateInstr::MakeLocationSummary() const { | 699 LocationSummary* StringInterpolateInstr::MakeLocationSummary() const { |
658 const intptr_t kNumInputs = 1; | 700 const intptr_t kNumInputs = 1; |
659 const intptr_t kNumTemps = 0; | 701 const intptr_t kNumTemps = 0; |
660 LocationSummary* summary = | 702 LocationSummary* summary = |
661 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); | 703 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kCall); |
662 summary->set_in(0, Location::RegisterLocation(A0)); | 704 summary->set_in(0, Location::RegisterLocation(A0)); |
663 summary->set_out(Location::RegisterLocation(V0)); | 705 summary->set_out(Location::RegisterLocation(V0)); |
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
700 return LocationSummary::Make(kNumInputs, | 742 return LocationSummary::Make(kNumInputs, |
701 Location::RequiresRegister(), | 743 Location::RequiresRegister(), |
702 LocationSummary::kNoCall); | 744 LocationSummary::kNoCall); |
703 } | 745 } |
704 | 746 |
705 | 747 |
706 void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 748 void LoadClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
707 Register object = locs()->in(0).reg(); | 749 Register object = locs()->in(0).reg(); |
708 Register result = locs()->out().reg(); | 750 Register result = locs()->out().reg(); |
709 Label load, done; | 751 Label load, done; |
710 __ andi(CMPRES, object, Immediate(kSmiTagMask)); | 752 __ andi(CMPRES1, object, Immediate(kSmiTagMask)); |
711 __ bne(CMPRES, ZR, &load); | 753 __ bne(CMPRES1, ZR, &load); |
712 __ LoadImmediate(result, Smi::RawValue(kSmiCid)); | 754 __ LoadImmediate(result, Smi::RawValue(kSmiCid)); |
713 __ b(&done); | 755 __ b(&done); |
714 __ Bind(&load); | 756 __ Bind(&load); |
715 __ LoadClassId(result, object); | 757 __ LoadClassId(result, object); |
716 __ SmiTag(result); | 758 __ SmiTag(result); |
717 __ Bind(&done); | 759 __ Bind(&done); |
718 } | 760 } |
719 | 761 |
720 | 762 |
721 CompileType LoadIndexedInstr::ComputeType() const { | 763 CompileType LoadIndexedInstr::ComputeType() const { |
(...skipping 197 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
919 __ lw(result, element_address); | 961 __ lw(result, element_address); |
920 // Verify that the signed value in 'result' can fit inside a Smi. | 962 // Verify that the signed value in 'result' can fit inside a Smi. |
921 __ BranchSignedLess(result, 0xC0000000, deopt); | 963 __ BranchSignedLess(result, 0xC0000000, deopt); |
922 __ SmiTag(result); | 964 __ SmiTag(result); |
923 } | 965 } |
924 break; | 966 break; |
925 case kTypedDataUint32ArrayCid: { | 967 case kTypedDataUint32ArrayCid: { |
926 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptUint32Load); | 968 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptUint32Load); |
927 __ lw(result, element_address); | 969 __ lw(result, element_address); |
928 // Verify that the unsigned value in 'result' can fit inside a Smi. | 970 // Verify that the unsigned value in 'result' can fit inside a Smi. |
929 __ LoadImmediate(TMP1, 0xC0000000); | 971 __ LoadImmediate(TMP, 0xC0000000); |
930 __ and_(CMPRES, result, TMP1); | 972 __ and_(CMPRES1, result, TMP); |
931 __ bne(CMPRES, ZR, deopt); | 973 __ bne(CMPRES1, ZR, deopt); |
932 __ SmiTag(result); | 974 __ SmiTag(result); |
933 } | 975 } |
934 break; | 976 break; |
935 default: | 977 default: |
936 ASSERT((class_id() == kArrayCid) || (class_id() == kImmutableArrayCid)); | 978 ASSERT((class_id() == kArrayCid) || (class_id() == kImmutableArrayCid)); |
937 __ lw(result, element_address); | 979 __ lw(result, element_address); |
938 break; | 980 break; |
939 } | 981 } |
940 } | 982 } |
941 | 983 |
(...skipping 162 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1104 value = 0; | 1146 value = 0; |
1105 } | 1147 } |
1106 __ LoadImmediate(TMP, static_cast<int8_t>(value)); | 1148 __ LoadImmediate(TMP, static_cast<int8_t>(value)); |
1107 __ sb(TMP, element_address); | 1149 __ sb(TMP, element_address); |
1108 } else { | 1150 } else { |
1109 Register value = locs()->in(2).reg(); | 1151 Register value = locs()->in(2).reg(); |
1110 Label store_value, bigger, smaller; | 1152 Label store_value, bigger, smaller; |
1111 __ SmiUntag(value); | 1153 __ SmiUntag(value); |
1112 __ BranchUnsignedLess(value, 0xFF + 1, &store_value); | 1154 __ BranchUnsignedLess(value, 0xFF + 1, &store_value); |
1113 __ LoadImmediate(TMP, 0xFF); | 1155 __ LoadImmediate(TMP, 0xFF); |
1114 __ slti(CMPRES, value, Immediate(1)); | 1156 __ slti(CMPRES1, value, Immediate(1)); |
1115 __ movn(TMP, ZR, CMPRES); | 1157 __ movn(TMP, ZR, CMPRES1); |
1116 __ mov(value, TMP); | 1158 __ mov(value, TMP); |
1117 __ Bind(&store_value); | 1159 __ Bind(&store_value); |
1118 __ sb(value, element_address); | 1160 __ sb(value, element_address); |
1119 } | 1161 } |
1120 break; | 1162 break; |
1121 } | 1163 } |
1122 case kTypedDataInt16ArrayCid: | 1164 case kTypedDataInt16ArrayCid: |
1123 case kTypedDataUint16ArrayCid: { | 1165 case kTypedDataUint16ArrayCid: { |
1124 Register value = locs()->in(2).reg(); | 1166 Register value = locs()->in(2).reg(); |
1125 __ SmiUntag(value); | 1167 __ SmiUntag(value); |
(...skipping 167 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1293 __ lw(CMPRES1, field_length_operand); | 1335 __ lw(CMPRES1, field_length_operand); |
1294 __ subu(CMPRES1, TMP, CMPRES1); | 1336 __ subu(CMPRES1, TMP, CMPRES1); |
1295 __ b(&length_compared); | 1337 __ b(&length_compared); |
1296 __ Bind(&no_fixed_length); | 1338 __ Bind(&no_fixed_length); |
1297 __ b(fail); | 1339 __ b(fail); |
1298 __ Bind(&length_compared); | 1340 __ Bind(&length_compared); |
1299 } | 1341 } |
1300 __ bne(CMPRES1, ZR, fail); | 1342 __ bne(CMPRES1, ZR, fail); |
1301 } | 1343 } |
1302 __ Bind(&skip_length_check); | 1344 __ Bind(&skip_length_check); |
1303 __ lw(TMP1, field_nullability_operand); | 1345 __ lw(TMP, field_nullability_operand); |
1304 __ subu(CMPRES, value_cid_reg, TMP1); | 1346 __ subu(CMPRES1, value_cid_reg, TMP); |
1305 } else if (value_cid == kNullCid) { | 1347 } else if (value_cid == kNullCid) { |
1306 // TODO(regis): TMP1 may conflict. Revisit. | 1348 __ lw(TMP, field_nullability_operand); |
1307 __ lw(TMP1, field_nullability_operand); | 1349 __ LoadImmediate(CMPRES1, value_cid); |
1308 __ LoadImmediate(CMPRES, value_cid); | 1350 __ subu(CMPRES1, TMP, CMPRES1); |
1309 __ subu(CMPRES, TMP1, CMPRES); | |
1310 } else { | 1351 } else { |
1311 Label skip_length_check; | 1352 Label skip_length_check; |
1312 // TODO(regis): TMP1 may conflict. Revisit. | 1353 __ lw(TMP, field_cid_operand); |
1313 __ lw(TMP1, field_cid_operand); | 1354 __ LoadImmediate(CMPRES1, value_cid); |
1314 __ LoadImmediate(CMPRES, value_cid); | 1355 __ subu(CMPRES1, TMP, CMPRES1); |
1315 __ subu(CMPRES, TMP1, CMPRES); | 1356 __ bne(CMPRES1, ZR, &skip_length_check); |
1316 __ bne(CMPRES, ZR, &skip_length_check); | |
1317 // Insert length check. | 1357 // Insert length check. |
1318 if (field_has_length) { | 1358 if (field_has_length) { |
1319 ASSERT(value_cid_reg != kNoRegister); | 1359 ASSERT(value_cid_reg != kNoRegister); |
1320 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { | 1360 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { |
1321 __ lw(TMP, FieldAddress(value_reg, Array::length_offset())); | 1361 __ lw(TMP, FieldAddress(value_reg, Array::length_offset())); |
1322 __ LoadImmediate(CMPRES, Smi::RawValue(field_length)); | 1362 __ LoadImmediate(CMPRES1, Smi::RawValue(field_length)); |
1323 __ subu(CMPRES, TMP, CMPRES); | 1363 __ subu(CMPRES1, TMP, CMPRES1); |
1324 } else if (RawObject::IsTypedDataClassId(value_cid)) { | 1364 } else if (RawObject::IsTypedDataClassId(value_cid)) { |
1325 __ lw(TMP, FieldAddress(value_reg, TypedData::length_offset())); | 1365 __ lw(TMP, FieldAddress(value_reg, TypedData::length_offset())); |
1326 __ LoadImmediate(CMPRES, Smi::RawValue(field_length)); | 1366 __ LoadImmediate(CMPRES1, Smi::RawValue(field_length)); |
1327 __ subu(CMPRES, TMP, CMPRES); | 1367 __ subu(CMPRES1, TMP, CMPRES1); |
1328 } else if (field_cid != kIllegalCid) { | 1368 } else if (field_cid != kIllegalCid) { |
1329 ASSERT(field_cid != value_cid); | 1369 ASSERT(field_cid != value_cid); |
1330 ASSERT(field_length >= 0); | 1370 ASSERT(field_length >= 0); |
1331 // Field has a known class id and length. At compile time it is | 1371 // Field has a known class id and length. At compile time it is |
1332 // known that the value's class id is not a fixed length list. | 1372 // known that the value's class id is not a fixed length list. |
1333 __ b(fail); | 1373 __ b(fail); |
1334 } else { | 1374 } else { |
1335 ASSERT(field_cid == kIllegalCid); | 1375 ASSERT(field_cid == kIllegalCid); |
1336 ASSERT(field_length == Field::kUnknownFixedLength); | 1376 ASSERT(field_length == Field::kUnknownFixedLength); |
1337 // Following jump cannot occur, fall through. | 1377 // Following jump cannot occur, fall through. |
1338 } | 1378 } |
1339 __ bne(CMPRES, ZR, fail); | 1379 __ bne(CMPRES1, ZR, fail); |
1340 } | 1380 } |
1341 __ Bind(&skip_length_check); | 1381 __ Bind(&skip_length_check); |
1342 } | 1382 } |
1343 __ beq(CMPRES, ZR, &ok); | 1383 __ beq(CMPRES1, ZR, &ok); |
1344 | 1384 |
1345 __ lw(CMPRES1, field_cid_operand); | 1385 __ lw(CMPRES1, field_cid_operand); |
1346 __ BranchNotEqual(CMPRES1, kIllegalCid, fail); | 1386 __ BranchNotEqual(CMPRES1, kIllegalCid, fail); |
1347 | 1387 |
1348 if (value_cid == kDynamicCid) { | 1388 if (value_cid == kDynamicCid) { |
1349 __ sw(value_cid_reg, field_cid_operand); | 1389 __ sw(value_cid_reg, field_cid_operand); |
1350 __ sw(value_cid_reg, field_nullability_operand); | 1390 __ sw(value_cid_reg, field_nullability_operand); |
1351 if (field_has_length) { | 1391 if (field_has_length) { |
1352 Label check_array, length_set, no_fixed_length; | 1392 Label check_array, length_set, no_fixed_length; |
1353 __ BranchEqual(value_cid_reg, kNullCid, &no_fixed_length); | 1393 __ BranchEqual(value_cid_reg, kNullCid, &no_fixed_length); |
(...skipping 13 matching lines...) Expand all Loading... | |
1367 __ BranchSignedGreater(value_cid_reg, kImmutableArrayCid, | 1407 __ BranchSignedGreater(value_cid_reg, kImmutableArrayCid, |
1368 &no_fixed_length); | 1408 &no_fixed_length); |
1369 __ BranchSignedLess(value_cid_reg, kArrayCid, &no_fixed_length); | 1409 __ BranchSignedLess(value_cid_reg, kArrayCid, &no_fixed_length); |
1370 // Destroy value_cid_reg (safe because we are finished with it). | 1410 // Destroy value_cid_reg (safe because we are finished with it). |
1371 __ lw(value_cid_reg, | 1411 __ lw(value_cid_reg, |
1372 FieldAddress(value_reg, Array::length_offset())); | 1412 FieldAddress(value_reg, Array::length_offset())); |
1373 __ sw(value_cid_reg, field_length_operand); | 1413 __ sw(value_cid_reg, field_length_operand); |
1374 // Updated field length from regular array. | 1414 // Updated field length from regular array. |
1375 __ b(&length_set); | 1415 __ b(&length_set); |
1376 __ Bind(&no_fixed_length); | 1416 __ Bind(&no_fixed_length); |
1377 // TODO(regis): TMP1 may conflict. Revisit. | 1417 __ LoadImmediate(TMP, Smi::RawValue(Field::kNoFixedLength)); |
1378 __ LoadImmediate(TMP1, Smi::RawValue(Field::kNoFixedLength)); | 1418 __ sw(TMP, field_length_operand); |
1379 __ sw(TMP1, field_length_operand); | |
1380 __ Bind(&length_set); | 1419 __ Bind(&length_set); |
1381 } | 1420 } |
1382 } else { | 1421 } else { |
1383 ASSERT(field_reg != kNoRegister); | 1422 ASSERT(field_reg != kNoRegister); |
1384 __ LoadImmediate(TMP1, value_cid); | 1423 __ LoadImmediate(TMP, value_cid); |
1385 __ sw(TMP1, field_cid_operand); | 1424 __ sw(TMP, field_cid_operand); |
1386 __ sw(TMP1, field_nullability_operand); | 1425 __ sw(TMP, field_nullability_operand); |
1387 if (field_has_length) { | 1426 if (field_has_length) { |
1388 ASSERT(value_cid_reg != kNoRegister); | 1427 ASSERT(value_cid_reg != kNoRegister); |
1389 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { | 1428 if ((value_cid == kArrayCid) || (value_cid == kImmutableArrayCid)) { |
1390 // Destroy value_cid_reg (safe because we are finished with it). | 1429 // Destroy value_cid_reg (safe because we are finished with it). |
1391 __ lw(value_cid_reg, | 1430 __ lw(value_cid_reg, |
1392 FieldAddress(value_reg, Array::length_offset())); | 1431 FieldAddress(value_reg, Array::length_offset())); |
1393 __ sw(value_cid_reg, field_length_operand); | 1432 __ sw(value_cid_reg, field_length_operand); |
1394 } else if (RawObject::IsTypedDataClassId(value_cid)) { | 1433 } else if (RawObject::IsTypedDataClassId(value_cid)) { |
1395 // Destroy value_cid_reg (safe because we are finished with it). | 1434 // Destroy value_cid_reg (safe because we are finished with it). |
1396 __ lw(value_cid_reg, | 1435 __ lw(value_cid_reg, |
1397 FieldAddress(value_reg, TypedData::length_offset())); | 1436 FieldAddress(value_reg, TypedData::length_offset())); |
1398 __ sw(value_cid_reg, field_length_operand); | 1437 __ sw(value_cid_reg, field_length_operand); |
1399 } else { | 1438 } else { |
1400 // Destroy value_cid_reg (safe because we are finished with it). | 1439 // Destroy value_cid_reg (safe because we are finished with it). |
1401 __ LoadImmediate(value_cid_reg, Smi::RawValue(Field::kNoFixedLength)); | 1440 __ LoadImmediate(value_cid_reg, Smi::RawValue(Field::kNoFixedLength)); |
1402 __ sw(value_cid_reg, field_length_operand); | 1441 __ sw(value_cid_reg, field_length_operand); |
1403 } | 1442 } |
1404 } | 1443 } |
1405 } | 1444 } |
1406 if (!ok_is_fall_through) { | 1445 if (!ok_is_fall_through) { |
1407 __ b(&ok); | 1446 __ b(&ok); |
1408 } | 1447 } |
1409 } else { | 1448 } else { |
1410 if (field_reg != kNoRegister) { | 1449 if (field_reg != kNoRegister) { |
1411 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); | 1450 __ LoadObject(field_reg, Field::ZoneHandle(field().raw())); |
1412 } | 1451 } |
1413 if (value_cid == kDynamicCid) { | 1452 if (value_cid == kDynamicCid) { |
1414 // Field's guarded class id is fixed but value's class id is not known. | 1453 // Field's guarded class id is fixed but value's class id is not known. |
1415 __ andi(CMPRES, value_reg, Immediate(kSmiTagMask)); | 1454 __ andi(CMPRES1, value_reg, Immediate(kSmiTagMask)); |
1416 | 1455 |
1417 if (field_cid != kSmiCid) { | 1456 if (field_cid != kSmiCid) { |
1418 __ beq(CMPRES, ZR, fail); | 1457 __ beq(CMPRES1, ZR, fail); |
1419 __ LoadClassId(value_cid_reg, value_reg); | 1458 __ LoadClassId(value_cid_reg, value_reg); |
1420 __ LoadImmediate(TMP1, field_cid); | 1459 __ LoadImmediate(TMP, field_cid); |
1421 __ subu(CMPRES, value_cid_reg, TMP1); | 1460 __ subu(CMPRES1, value_cid_reg, TMP); |
1422 } | 1461 } |
1423 | 1462 |
1424 if (field_has_length) { | 1463 if (field_has_length) { |
1425 // Jump when Value CID != Field guard CID | 1464 // Jump when Value CID != Field guard CID |
1426 __ bne(CMPRES, ZR, fail); | 1465 __ bne(CMPRES1, ZR, fail); |
1427 // Classes are same, perform guarded list length check. | 1466 // Classes are same, perform guarded list length check. |
1428 ASSERT(field_reg != kNoRegister); | 1467 ASSERT(field_reg != kNoRegister); |
1429 ASSERT(value_cid_reg != kNoRegister); | 1468 ASSERT(value_cid_reg != kNoRegister); |
1430 FieldAddress field_length_operand( | 1469 FieldAddress field_length_operand( |
1431 field_reg, Field::guarded_list_length_offset()); | 1470 field_reg, Field::guarded_list_length_offset()); |
1432 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { | 1471 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { |
1433 // Destroy value_cid_reg (safe because we are finished with it). | 1472 // Destroy value_cid_reg (safe because we are finished with it). |
1434 __ lw(value_cid_reg, | 1473 __ lw(value_cid_reg, |
1435 FieldAddress(value_reg, Array::length_offset())); | 1474 FieldAddress(value_reg, Array::length_offset())); |
1436 } else if (RawObject::IsTypedDataClassId(field_cid)) { | 1475 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
1437 // Destroy value_cid_reg (safe because we are finished with it). | 1476 // Destroy value_cid_reg (safe because we are finished with it). |
1438 __ lw(value_cid_reg, | 1477 __ lw(value_cid_reg, |
1439 FieldAddress(value_reg, TypedData::length_offset())); | 1478 FieldAddress(value_reg, TypedData::length_offset())); |
1440 } | 1479 } |
1441 __ lw(TMP1, field_length_operand); | 1480 __ lw(TMP, field_length_operand); |
1442 __ subu(CMPRES, value_cid_reg, TMP1); | 1481 __ subu(CMPRES1, value_cid_reg, TMP); |
1443 } | 1482 } |
1444 | 1483 |
1445 if (field().is_nullable() && (field_cid != kNullCid)) { | 1484 if (field().is_nullable() && (field_cid != kNullCid)) { |
1446 __ beq(CMPRES, ZR, &ok); | 1485 __ beq(CMPRES1, ZR, &ok); |
1447 __ LoadImmediate(TMP, reinterpret_cast<int32_t>(Object::null())); | 1486 __ LoadImmediate(TMP, reinterpret_cast<int32_t>(Object::null())); |
1448 __ subu(CMPRES, value_reg, TMP); | 1487 __ subu(CMPRES1, value_reg, TMP); |
1449 } | 1488 } |
1450 | 1489 |
1451 if (ok_is_fall_through) { | 1490 if (ok_is_fall_through) { |
1452 __ bne(CMPRES, ZR, fail); | 1491 __ bne(CMPRES1, ZR, fail); |
1453 } else { | 1492 } else { |
1454 __ beq(CMPRES, ZR, &ok); | 1493 __ beq(CMPRES1, ZR, &ok); |
1455 } | 1494 } |
1456 } else { | 1495 } else { |
1457 // Both value's and field's class id is known. | 1496 // Both value's and field's class id is known. |
1458 if ((value_cid != field_cid) && (value_cid != nullability)) { | 1497 if ((value_cid != field_cid) && (value_cid != nullability)) { |
1459 if (ok_is_fall_through) { | 1498 if (ok_is_fall_through) { |
1460 __ b(fail); | 1499 __ b(fail); |
1461 } | 1500 } |
1462 } else if (field_has_length && (value_cid == field_cid)) { | 1501 } else if (field_has_length && (value_cid == field_cid)) { |
1463 ASSERT(value_cid_reg != kNoRegister); | 1502 ASSERT(value_cid_reg != kNoRegister); |
1464 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { | 1503 if ((field_cid == kArrayCid) || (field_cid == kImmutableArrayCid)) { |
1465 // Destroy value_cid_reg (safe because we are finished with it). | 1504 // Destroy value_cid_reg (safe because we are finished with it). |
1466 __ lw(value_cid_reg, | 1505 __ lw(value_cid_reg, |
1467 FieldAddress(value_reg, Array::length_offset())); | 1506 FieldAddress(value_reg, Array::length_offset())); |
1468 } else if (RawObject::IsTypedDataClassId(field_cid)) { | 1507 } else if (RawObject::IsTypedDataClassId(field_cid)) { |
1469 // Destroy value_cid_reg (safe because we are finished with it). | 1508 // Destroy value_cid_reg (safe because we are finished with it). |
1470 __ lw(value_cid_reg, | 1509 __ lw(value_cid_reg, |
1471 FieldAddress(value_reg, TypedData::length_offset())); | 1510 FieldAddress(value_reg, TypedData::length_offset())); |
1472 } | 1511 } |
1473 __ LoadImmediate(TMP1, Smi::RawValue(field_length)); | 1512 __ LoadImmediate(TMP, Smi::RawValue(field_length)); |
1474 __ subu(CMPRES, value_cid_reg, TMP1); | 1513 __ subu(CMPRES1, value_cid_reg, TMP); |
1475 if (ok_is_fall_through) { | 1514 if (ok_is_fall_through) { |
1476 __ bne(CMPRES, ZR, fail); | 1515 __ bne(CMPRES1, ZR, fail); |
1477 } | 1516 } |
1478 } else { | 1517 } else { |
1479 // Nothing to emit. | 1518 // Nothing to emit. |
1480 ASSERT(!compiler->is_optimizing()); | 1519 ASSERT(!compiler->is_optimizing()); |
1481 return; | 1520 return; |
1482 } | 1521 } |
1483 } | 1522 } |
1484 } | 1523 } |
1485 | 1524 |
1486 if (deopt == NULL) { | 1525 if (deopt == NULL) { |
(...skipping 198 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1685 | 1724 |
1686 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1725 void InstantiateTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1687 __ TraceSimMsg("InstantiateTypeInstr"); | 1726 __ TraceSimMsg("InstantiateTypeInstr"); |
1688 Register instantiator_reg = locs()->in(0).reg(); | 1727 Register instantiator_reg = locs()->in(0).reg(); |
1689 Register result_reg = locs()->out().reg(); | 1728 Register result_reg = locs()->out().reg(); |
1690 | 1729 |
1691 // 'instantiator_reg' is the instantiator AbstractTypeArguments object | 1730 // 'instantiator_reg' is the instantiator AbstractTypeArguments object |
1692 // (or null). | 1731 // (or null). |
1693 // A runtime call to instantiate the type is required. | 1732 // A runtime call to instantiate the type is required. |
1694 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 1733 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
1695 __ LoadObject(TMP1, Object::ZoneHandle()); | 1734 __ LoadObject(TMP, Object::ZoneHandle()); |
1696 __ sw(TMP1, Address(SP, 2 * kWordSize)); // Make room for the result. | 1735 __ sw(TMP, Address(SP, 2 * kWordSize)); // Make room for the result. |
1697 __ LoadObject(TMP1, type()); | 1736 __ LoadObject(TMP, type()); |
1698 __ sw(TMP1, Address(SP, 1 * kWordSize)); | 1737 __ sw(TMP, Address(SP, 1 * kWordSize)); |
1699 // Push instantiator type arguments. | 1738 // Push instantiator type arguments. |
1700 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); | 1739 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); |
1701 | 1740 |
1702 compiler->GenerateRuntimeCall(token_pos(), | 1741 compiler->GenerateRuntimeCall(token_pos(), |
1703 deopt_id(), | 1742 deopt_id(), |
1704 kInstantiateTypeRuntimeEntry, | 1743 kInstantiateTypeRuntimeEntry, |
1705 2, | 1744 2, |
1706 locs()); | 1745 locs()); |
1707 // Pop instantiated type. | 1746 // Pop instantiated type. |
1708 __ lw(result_reg, Address(SP, 2 * kWordSize)); | 1747 __ lw(result_reg, Address(SP, 2 * kWordSize)); |
(...skipping 30 matching lines...) Expand all Loading... | |
1739 // the type arguments. | 1778 // the type arguments. |
1740 Label type_arguments_instantiated; | 1779 Label type_arguments_instantiated; |
1741 const intptr_t len = type_arguments().Length(); | 1780 const intptr_t len = type_arguments().Length(); |
1742 if (type_arguments().IsRawInstantiatedRaw(len)) { | 1781 if (type_arguments().IsRawInstantiatedRaw(len)) { |
1743 __ BranchEqual(instantiator_reg, reinterpret_cast<int32_t>(Object::null()), | 1782 __ BranchEqual(instantiator_reg, reinterpret_cast<int32_t>(Object::null()), |
1744 &type_arguments_instantiated); | 1783 &type_arguments_instantiated); |
1745 } | 1784 } |
1746 // Instantiate non-null type arguments. | 1785 // Instantiate non-null type arguments. |
1747 // A runtime call to instantiate the type arguments is required. | 1786 // A runtime call to instantiate the type arguments is required. |
1748 __ addiu(SP, SP, Immediate(-3 * kWordSize)); | 1787 __ addiu(SP, SP, Immediate(-3 * kWordSize)); |
1749 __ LoadObject(TMP1, Object::ZoneHandle()); | 1788 __ LoadObject(TMP, Object::ZoneHandle()); |
1750 __ sw(TMP1, Address(SP, 2 * kWordSize)); // Make room for the result. | 1789 __ sw(TMP, Address(SP, 2 * kWordSize)); // Make room for the result. |
1751 __ LoadObject(TMP1, type_arguments()); | 1790 __ LoadObject(TMP, type_arguments()); |
1752 __ sw(TMP1, Address(SP, 1 * kWordSize)); | 1791 __ sw(TMP, Address(SP, 1 * kWordSize)); |
1753 // Push instantiator type arguments. | 1792 // Push instantiator type arguments. |
1754 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); | 1793 __ sw(instantiator_reg, Address(SP, 0 * kWordSize)); |
1755 | 1794 |
1756 compiler->GenerateRuntimeCall(token_pos(), | 1795 compiler->GenerateRuntimeCall(token_pos(), |
1757 deopt_id(), | 1796 deopt_id(), |
1758 kInstantiateTypeArgumentsRuntimeEntry, | 1797 kInstantiateTypeArgumentsRuntimeEntry, |
1759 2, | 1798 2, |
1760 locs()); | 1799 locs()); |
1761 // Pop instantiated type arguments. | 1800 // Pop instantiated type arguments. |
1762 __ lw(result_reg, Address(SP, 2 * kWordSize)); | 1801 __ lw(result_reg, Address(SP, 2 * kWordSize)); |
(...skipping 122 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1885 } | 1924 } |
1886 | 1925 |
1887 | 1926 |
1888 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1927 void CloneContextInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1889 Register context_value = locs()->in(0).reg(); | 1928 Register context_value = locs()->in(0).reg(); |
1890 Register result = locs()->out().reg(); | 1929 Register result = locs()->out().reg(); |
1891 | 1930 |
1892 __ TraceSimMsg("CloneContextInstr"); | 1931 __ TraceSimMsg("CloneContextInstr"); |
1893 | 1932 |
1894 __ addiu(SP, SP, Immediate(-2 * kWordSize)); | 1933 __ addiu(SP, SP, Immediate(-2 * kWordSize)); |
1895 __ LoadObject(TMP1, Object::ZoneHandle()); // Make room for the result. | 1934 __ LoadObject(TMP, Object::ZoneHandle()); // Make room for the result. |
1896 __ sw(TMP1, Address(SP, 1 * kWordSize)); | 1935 __ sw(TMP, Address(SP, 1 * kWordSize)); |
1897 __ sw(context_value, Address(SP, 0 * kWordSize)); | 1936 __ sw(context_value, Address(SP, 0 * kWordSize)); |
1898 | 1937 |
1899 compiler->GenerateRuntimeCall(token_pos(), | 1938 compiler->GenerateRuntimeCall(token_pos(), |
1900 deopt_id(), | 1939 deopt_id(), |
1901 kCloneContextRuntimeEntry, | 1940 kCloneContextRuntimeEntry, |
1902 1, | 1941 1, |
1903 locs()); | 1942 locs()); |
1904 __ lw(result, Address(SP, 1 * kWordSize)); // Get result (cloned context). | 1943 __ lw(result, Address(SP, 1 * kWordSize)); // Get result (cloned context). |
1905 __ addiu(SP, SP, Immediate(2 * kWordSize)); | 1944 __ addiu(SP, SP, Immediate(2 * kWordSize)); |
1906 } | 1945 } |
1907 | 1946 |
1908 | 1947 |
1909 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary() const { | 1948 LocationSummary* CatchBlockEntryInstr::MakeLocationSummary() const { |
1910 UNREACHABLE(); | 1949 UNREACHABLE(); |
1911 return NULL; | 1950 return NULL; |
1912 } | 1951 } |
1913 | 1952 |
1914 | 1953 |
1915 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 1954 void CatchBlockEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1916 __ Bind(compiler->GetJumpLabel(this)); | 1955 __ Bind(compiler->GetJumpLabel(this)); |
1917 compiler->AddExceptionHandler(catch_try_index(), | 1956 compiler->AddExceptionHandler(catch_try_index(), |
1918 try_index(), | 1957 try_index(), |
1919 compiler->assembler()->CodeSize(), | 1958 compiler->assembler()->CodeSize(), |
1920 catch_handler_types_, | 1959 catch_handler_types_, |
1921 needs_stacktrace()); | 1960 needs_stacktrace()); |
1922 // Restore pool pointer. | 1961 // Restore pool pointer. |
1923 __ GetNextPC(CMPRES, TMP); | 1962 __ GetNextPC(CMPRES1, TMP); |
1924 const intptr_t object_pool_pc_dist = | 1963 const intptr_t object_pool_pc_dist = |
1925 Instructions::HeaderSize() - Instructions::object_pool_offset() + | 1964 Instructions::HeaderSize() - Instructions::object_pool_offset() + |
1926 compiler->assembler()->CodeSize() - 1 * Instr::kInstrSize; | 1965 compiler->assembler()->CodeSize() - 1 * Instr::kInstrSize; |
1927 __ LoadFromOffset(PP, CMPRES, -object_pool_pc_dist); | 1966 __ LoadFromOffset(PP, CMPRES1, -object_pool_pc_dist); |
1928 | 1967 |
1929 if (HasParallelMove()) { | 1968 if (HasParallelMove()) { |
1930 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); | 1969 compiler->parallel_move_resolver()->EmitNativeCode(parallel_move()); |
1931 } | 1970 } |
1932 | 1971 |
1933 // Restore SP from FP as we are coming from a throw and the code for | 1972 // Restore SP from FP as we are coming from a throw and the code for |
1934 // popping arguments has not been run. | 1973 // popping arguments has not been run. |
1935 const intptr_t fp_sp_dist = | 1974 const intptr_t fp_sp_dist = |
1936 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; | 1975 (kFirstLocalSlotFromFp + 1 - compiler->StackSize()) * kWordSize; |
1937 ASSERT(fp_sp_dist <= 0); | 1976 ASSERT(fp_sp_dist <= 0); |
(...skipping 55 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1993 private: | 2032 private: |
1994 CheckStackOverflowInstr* instruction_; | 2033 CheckStackOverflowInstr* instruction_; |
1995 }; | 2034 }; |
1996 | 2035 |
1997 | 2036 |
1998 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2037 void CheckStackOverflowInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
1999 __ TraceSimMsg("CheckStackOverflowInstr"); | 2038 __ TraceSimMsg("CheckStackOverflowInstr"); |
2000 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); | 2039 CheckStackOverflowSlowPath* slow_path = new CheckStackOverflowSlowPath(this); |
2001 compiler->AddSlowPathCode(slow_path); | 2040 compiler->AddSlowPathCode(slow_path); |
2002 | 2041 |
2003 __ LoadImmediate(TMP1, Isolate::Current()->stack_limit_address()); | 2042 __ LoadImmediate(TMP, Isolate::Current()->stack_limit_address()); |
2004 | 2043 |
2005 __ lw(CMPRES1, Address(TMP1)); | 2044 __ lw(CMPRES1, Address(TMP)); |
2006 __ BranchUnsignedLessEqual(SP, CMPRES1, slow_path->entry_label()); | 2045 __ BranchUnsignedLessEqual(SP, CMPRES1, slow_path->entry_label()); |
2007 if (compiler->CanOSRFunction() && in_loop()) { | 2046 if (compiler->CanOSRFunction() && in_loop()) { |
2008 Register temp = locs()->temp(0).reg(); | 2047 Register temp = locs()->temp(0).reg(); |
2009 // In unoptimized code check the usage counter to trigger OSR at loop | 2048 // In unoptimized code check the usage counter to trigger OSR at loop |
2010 // stack checks. Use progressively higher thresholds for more deeply | 2049 // stack checks. Use progressively higher thresholds for more deeply |
2011 // nested loops to attempt to hit outer loops with OSR when possible. | 2050 // nested loops to attempt to hit outer loops with OSR when possible. |
2012 __ LoadObject(temp, compiler->parsed_function().function()); | 2051 __ LoadObject(temp, compiler->parsed_function().function()); |
2013 intptr_t threshold = | 2052 intptr_t threshold = |
2014 FLAG_optimization_counter_threshold * (loop_depth() + 1); | 2053 FLAG_optimization_counter_threshold * (loop_depth() + 1); |
2015 __ lw(temp, FieldAddress(temp, Function::usage_counter_offset())); | 2054 __ lw(temp, FieldAddress(temp, Function::usage_counter_offset())); |
(...skipping 29 matching lines...) Expand all Loading... | |
2045 // of constant propagation, inlining, etc. | 2084 // of constant propagation, inlining, etc. |
2046 if ((value >= kCountLimit) && is_truncating) { | 2085 if ((value >= kCountLimit) && is_truncating) { |
2047 __ mov(result, ZR); | 2086 __ mov(result, ZR); |
2048 } else { | 2087 } else { |
2049 // Result is Mint or exception. | 2088 // Result is Mint or exception. |
2050 __ b(deopt); | 2089 __ b(deopt); |
2051 } | 2090 } |
2052 } else { | 2091 } else { |
2053 if (!is_truncating) { | 2092 if (!is_truncating) { |
2054 // Check for overflow (preserve left). | 2093 // Check for overflow (preserve left). |
2055 __ sll(TMP1, left, value); | 2094 __ sll(TMP, left, value); |
2056 __ sra(CMPRES1, TMP1, value); | 2095 __ sra(CMPRES1, TMP, value); |
2057 __ bne(CMPRES1, left, deopt); // Overflow. | 2096 __ bne(CMPRES1, left, deopt); // Overflow. |
2058 } | 2097 } |
2059 // Shift for result now we know there is no overflow. | 2098 // Shift for result now we know there is no overflow. |
2060 __ sll(result, left, value); | 2099 __ sll(result, left, value); |
2061 } | 2100 } |
2062 return; | 2101 return; |
2063 } | 2102 } |
2064 | 2103 |
2065 // Right (locs.in(1)) is not constant. | 2104 // Right (locs.in(1)) is not constant. |
2066 Register right = locs.in(1).reg(); | 2105 Register right = locs.in(1).reg(); |
(...skipping 29 matching lines...) Expand all Loading... | |
2096 if (right_needs_check) { | 2135 if (right_needs_check) { |
2097 const bool right_may_be_negative = | 2136 const bool right_may_be_negative = |
2098 (right_range == NULL) || | 2137 (right_range == NULL) || |
2099 !right_range->IsWithin(0, RangeBoundary::kPlusInfinity); | 2138 !right_range->IsWithin(0, RangeBoundary::kPlusInfinity); |
2100 if (right_may_be_negative) { | 2139 if (right_may_be_negative) { |
2101 ASSERT(shift_left->CanDeoptimize()); | 2140 ASSERT(shift_left->CanDeoptimize()); |
2102 __ bltz(right, deopt); | 2141 __ bltz(right, deopt); |
2103 } | 2142 } |
2104 Label done, is_not_zero; | 2143 Label done, is_not_zero; |
2105 | 2144 |
2106 __ sltiu(CMPRES, | 2145 __ sltiu(CMPRES1, |
2107 right, Immediate(reinterpret_cast<int32_t>(Smi::New(Smi::kBits)))); | 2146 right, Immediate(reinterpret_cast<int32_t>(Smi::New(Smi::kBits)))); |
2108 __ movz(result, ZR, CMPRES); // result = right >= kBits ? 0 : result. | 2147 __ movz(result, ZR, CMPRES1); // result = right >= kBits ? 0 : result. |
2109 __ sra(TMP1, right, kSmiTagSize); | 2148 __ sra(TMP, right, kSmiTagSize); |
2110 __ sllv(TMP1, left, TMP1); | 2149 __ sllv(TMP, left, TMP); |
2111 // result = right < kBits ? left << right : result. | 2150 // result = right < kBits ? left << right : result. |
2112 __ movn(result, TMP1, CMPRES); | 2151 __ movn(result, TMP, CMPRES1); |
2113 } else { | 2152 } else { |
2114 __ sra(TMP, right, kSmiTagSize); | 2153 __ sra(TMP, right, kSmiTagSize); |
2115 __ sllv(result, left, TMP); | 2154 __ sllv(result, left, TMP); |
2116 } | 2155 } |
2117 } else { | 2156 } else { |
2118 if (right_needs_check) { | 2157 if (right_needs_check) { |
2119 ASSERT(shift_left->CanDeoptimize()); | 2158 ASSERT(shift_left->CanDeoptimize()); |
2120 __ BranchUnsignedGreaterEqual( | 2159 __ BranchUnsignedGreaterEqual( |
2121 right, reinterpret_cast<int32_t>(Smi::New(Smi::kBits)), deopt); | 2160 right, reinterpret_cast<int32_t>(Smi::New(Smi::kBits)), deopt); |
2122 } | 2161 } |
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2185 if (locs()->in(1).IsConstant()) { | 2224 if (locs()->in(1).IsConstant()) { |
2186 const Object& constant = locs()->in(1).constant(); | 2225 const Object& constant = locs()->in(1).constant(); |
2187 ASSERT(constant.IsSmi()); | 2226 ASSERT(constant.IsSmi()); |
2188 int32_t imm = reinterpret_cast<int32_t>(constant.raw()); | 2227 int32_t imm = reinterpret_cast<int32_t>(constant.raw()); |
2189 switch (op_kind()) { | 2228 switch (op_kind()) { |
2190 case Token::kSUB: { | 2229 case Token::kSUB: { |
2191 __ TraceSimMsg("kSUB imm"); | 2230 __ TraceSimMsg("kSUB imm"); |
2192 if (deopt == NULL) { | 2231 if (deopt == NULL) { |
2193 __ AddImmediate(result, left, -imm); | 2232 __ AddImmediate(result, left, -imm); |
2194 } else { | 2233 } else { |
2195 __ SubImmediateDetectOverflow(result, left, imm, CMPRES); | 2234 __ SubImmediateDetectOverflow(result, left, imm, CMPRES1); |
2196 __ bltz(CMPRES, deopt); | 2235 __ bltz(CMPRES1, deopt); |
2197 } | 2236 } |
2198 break; | 2237 break; |
2199 } | 2238 } |
2200 case Token::kADD: { | 2239 case Token::kADD: { |
2201 if (deopt == NULL) { | 2240 if (deopt == NULL) { |
2202 __ AddImmediate(result, left, imm); | 2241 __ AddImmediate(result, left, imm); |
2203 } else { | 2242 } else { |
2204 Register temp = locs()->temp(0).reg(); | 2243 Register temp = locs()->temp(0).reg(); |
2205 __ AddImmediateDetectOverflow(result, left, imm, CMPRES, temp); | 2244 __ AddImmediateDetectOverflow(result, left, imm, CMPRES1, temp); |
2206 __ bltz(CMPRES, deopt); | 2245 __ bltz(CMPRES1, deopt); |
2207 } | 2246 } |
2208 break; | 2247 break; |
2209 } | 2248 } |
2210 case Token::kMUL: { | 2249 case Token::kMUL: { |
2211 // Keep left value tagged and untag right value. | 2250 // Keep left value tagged and untag right value. |
2212 const intptr_t value = Smi::Cast(constant).Value(); | 2251 const intptr_t value = Smi::Cast(constant).Value(); |
2213 if (deopt == NULL) { | 2252 if (deopt == NULL) { |
2214 if (value == 2) { | 2253 if (value == 2) { |
2215 __ sll(result, left, 1); | 2254 __ sll(result, left, 1); |
2216 } else { | 2255 } else { |
2217 __ LoadImmediate(TMP1, value); | 2256 __ LoadImmediate(TMP, value); |
2218 __ mult(left, TMP1); | 2257 __ mult(left, TMP); |
2219 __ mflo(result); | 2258 __ mflo(result); |
2220 } | 2259 } |
2221 } else { | 2260 } else { |
2222 if (value == 2) { | 2261 if (value == 2) { |
2223 __ sra(CMPRES2, left, 31); // CMPRES2 = sign of left. | 2262 __ sra(CMPRES2, left, 31); // CMPRES2 = sign of left. |
2224 __ sll(result, left, 1); | 2263 __ sll(result, left, 1); |
2225 } else { | 2264 } else { |
2226 __ LoadImmediate(TMP1, value); | 2265 __ LoadImmediate(TMP, value); |
2227 __ mult(left, TMP1); | 2266 __ mult(left, TMP); |
2228 __ mflo(result); | 2267 __ mflo(result); |
2229 __ mfhi(CMPRES2); | 2268 __ mfhi(CMPRES2); |
2230 } | 2269 } |
2231 __ sra(CMPRES, result, 31); | 2270 __ sra(CMPRES1, result, 31); |
2232 __ bne(CMPRES1, CMPRES2, deopt); | 2271 __ bne(CMPRES1, CMPRES2, deopt); |
2233 } | 2272 } |
2234 break; | 2273 break; |
2235 } | 2274 } |
2236 case Token::kTRUNCDIV: { | 2275 case Token::kTRUNCDIV: { |
2237 const intptr_t value = Smi::Cast(constant).Value(); | 2276 const intptr_t value = Smi::Cast(constant).Value(); |
2238 if (value == 1) { | 2277 if (value == 1) { |
2239 if (result != left) { | 2278 if (result != left) { |
2240 __ mov(result, left); | 2279 __ mov(result, left); |
2241 } | 2280 } |
(...skipping 20 matching lines...) Expand all Loading... | |
2262 __ subu(result, ZR, result); | 2301 __ subu(result, ZR, result); |
2263 } | 2302 } |
2264 __ SmiTag(result); | 2303 __ SmiTag(result); |
2265 break; | 2304 break; |
2266 } | 2305 } |
2267 case Token::kBIT_AND: { | 2306 case Token::kBIT_AND: { |
2268 // No overflow check. | 2307 // No overflow check. |
2269 if (Utils::IsUint(kImmBits, imm)) { | 2308 if (Utils::IsUint(kImmBits, imm)) { |
2270 __ andi(result, left, Immediate(imm)); | 2309 __ andi(result, left, Immediate(imm)); |
2271 } else { | 2310 } else { |
2272 __ LoadImmediate(TMP1, imm); | 2311 __ LoadImmediate(TMP, imm); |
2273 __ and_(result, left, TMP1); | 2312 __ and_(result, left, TMP); |
2274 } | 2313 } |
2275 break; | 2314 break; |
2276 } | 2315 } |
2277 case Token::kBIT_OR: { | 2316 case Token::kBIT_OR: { |
2278 // No overflow check. | 2317 // No overflow check. |
2279 if (Utils::IsUint(kImmBits, imm)) { | 2318 if (Utils::IsUint(kImmBits, imm)) { |
2280 __ ori(result, left, Immediate(imm)); | 2319 __ ori(result, left, Immediate(imm)); |
2281 } else { | 2320 } else { |
2282 __ LoadImmediate(TMP1, imm); | 2321 __ LoadImmediate(TMP, imm); |
2283 __ or_(result, left, TMP1); | 2322 __ or_(result, left, TMP); |
2284 } | 2323 } |
2285 break; | 2324 break; |
2286 } | 2325 } |
2287 case Token::kBIT_XOR: { | 2326 case Token::kBIT_XOR: { |
2288 // No overflow check. | 2327 // No overflow check. |
2289 if (Utils::IsUint(kImmBits, imm)) { | 2328 if (Utils::IsUint(kImmBits, imm)) { |
2290 __ xori(result, left, Immediate(imm)); | 2329 __ xori(result, left, Immediate(imm)); |
2291 } else { | 2330 } else { |
2292 __ LoadImmediate(TMP1, imm); | 2331 __ LoadImmediate(TMP, imm); |
2293 __ xor_(result, left, TMP1); | 2332 __ xor_(result, left, TMP); |
2294 } | 2333 } |
2295 break; | 2334 break; |
2296 } | 2335 } |
2297 case Token::kSHR: { | 2336 case Token::kSHR: { |
2298 // sarl operation masks the count to 5 bits. | 2337 // sarl operation masks the count to 5 bits. |
2299 const intptr_t kCountLimit = 0x1F; | 2338 const intptr_t kCountLimit = 0x1F; |
2300 intptr_t value = Smi::Cast(constant).Value(); | 2339 intptr_t value = Smi::Cast(constant).Value(); |
2301 | 2340 |
2302 __ TraceSimMsg("kSHR"); | 2341 __ TraceSimMsg("kSHR"); |
2303 | 2342 |
(...skipping 24 matching lines...) Expand all Loading... | |
2328 return; | 2367 return; |
2329 } | 2368 } |
2330 | 2369 |
2331 Register right = locs()->in(1).reg(); | 2370 Register right = locs()->in(1).reg(); |
2332 switch (op_kind()) { | 2371 switch (op_kind()) { |
2333 case Token::kADD: { | 2372 case Token::kADD: { |
2334 if (deopt == NULL) { | 2373 if (deopt == NULL) { |
2335 __ addu(result, left, right); | 2374 __ addu(result, left, right); |
2336 } else { | 2375 } else { |
2337 Register temp = locs()->temp(0).reg(); | 2376 Register temp = locs()->temp(0).reg(); |
2338 __ AdduDetectOverflow(result, left, right, CMPRES, temp); | 2377 __ AdduDetectOverflow(result, left, right, CMPRES1, temp); |
2339 __ bltz(CMPRES, deopt); | 2378 __ bltz(CMPRES1, deopt); |
2340 } | 2379 } |
2341 break; | 2380 break; |
2342 } | 2381 } |
2343 case Token::kSUB: { | 2382 case Token::kSUB: { |
2344 __ TraceSimMsg("kSUB"); | 2383 __ TraceSimMsg("kSUB"); |
2345 if (deopt == NULL) { | 2384 if (deopt == NULL) { |
2346 __ subu(result, left, right); | 2385 __ subu(result, left, right); |
2347 } else { | 2386 } else { |
2348 __ SubuDetectOverflow(result, left, right, CMPRES); | 2387 __ SubuDetectOverflow(result, left, right, CMPRES1); |
2349 __ bltz(CMPRES, deopt); | 2388 __ bltz(CMPRES1, deopt); |
2350 } | 2389 } |
2351 break; | 2390 break; |
2352 } | 2391 } |
2353 case Token::kMUL: { | 2392 case Token::kMUL: { |
2354 __ TraceSimMsg("kMUL"); | 2393 __ TraceSimMsg("kMUL"); |
2355 __ sra(TMP, left, kSmiTagSize); | 2394 __ sra(TMP, left, kSmiTagSize); |
2356 __ mult(TMP, right); | 2395 __ mult(TMP, right); |
2357 __ mflo(result); | 2396 __ mflo(result); |
2358 if (deopt != NULL) { | 2397 if (deopt != NULL) { |
2359 __ mfhi(CMPRES2); | 2398 __ mfhi(CMPRES2); |
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2452 } | 2491 } |
2453 | 2492 |
2454 | 2493 |
2455 void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 2494 void CheckEitherNonSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
2456 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptBinaryDoubleOp); | 2495 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptBinaryDoubleOp); |
2457 intptr_t left_cid = left()->Type()->ToCid(); | 2496 intptr_t left_cid = left()->Type()->ToCid(); |
2458 intptr_t right_cid = right()->Type()->ToCid(); | 2497 intptr_t right_cid = right()->Type()->ToCid(); |
2459 Register left = locs()->in(0).reg(); | 2498 Register left = locs()->in(0).reg(); |
2460 Register right = locs()->in(1).reg(); | 2499 Register right = locs()->in(1).reg(); |
2461 if (left_cid == kSmiCid) { | 2500 if (left_cid == kSmiCid) { |
2462 __ andi(CMPRES, right, Immediate(kSmiTagMask)); | 2501 __ andi(CMPRES1, right, Immediate(kSmiTagMask)); |
2463 } else if (right_cid == kSmiCid) { | 2502 } else if (right_cid == kSmiCid) { |
2464 __ andi(CMPRES, left, Immediate(kSmiTagMask)); | 2503 __ andi(CMPRES1, left, Immediate(kSmiTagMask)); |
2465 } else { | 2504 } else { |
2466 __ or_(TMP, left, right); | 2505 __ or_(TMP, left, right); |
2467 __ andi(CMPRES, TMP, Immediate(kSmiTagMask)); | 2506 __ andi(CMPRES1, TMP, Immediate(kSmiTagMask)); |
2468 } | 2507 } |
2469 __ beq(CMPRES, ZR, deopt); | 2508 __ beq(CMPRES1, ZR, deopt); |
2470 } | 2509 } |
2471 | 2510 |
2472 | 2511 |
2473 LocationSummary* BoxDoubleInstr::MakeLocationSummary() const { | 2512 LocationSummary* BoxDoubleInstr::MakeLocationSummary() const { |
2474 const intptr_t kNumInputs = 1; | 2513 const intptr_t kNumInputs = 1; |
2475 const intptr_t kNumTemps = 0; | 2514 const intptr_t kNumTemps = 0; |
2476 LocationSummary* summary = | 2515 LocationSummary* summary = |
2477 new LocationSummary(kNumInputs, | 2516 new LocationSummary(kNumInputs, |
2478 kNumTemps, | 2517 kNumTemps, |
2479 LocationSummary::kCallOnSlowPath); | 2518 LocationSummary::kCallOnSlowPath); |
(...skipping 75 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
2555 if (value_cid == kDoubleCid) { | 2594 if (value_cid == kDoubleCid) { |
2556 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); | 2595 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); |
2557 } else if (value_cid == kSmiCid) { | 2596 } else if (value_cid == kSmiCid) { |
2558 __ SmiUntag(value); // Untag input before conversion. | 2597 __ SmiUntag(value); // Untag input before conversion. |
2559 __ mtc1(value, STMP1); | 2598 __ mtc1(value, STMP1); |
2560 __ cvtdw(result, STMP1); | 2599 __ cvtdw(result, STMP1); |
2561 } else { | 2600 } else { |
2562 Label* deopt = compiler->AddDeoptStub(deopt_id_, kDeoptBinaryDoubleOp); | 2601 Label* deopt = compiler->AddDeoptStub(deopt_id_, kDeoptBinaryDoubleOp); |
2563 Label is_smi, done; | 2602 Label is_smi, done; |
2564 | 2603 |
2565 __ andi(CMPRES, value, Immediate(kSmiTagMask)); | 2604 __ andi(CMPRES1, value, Immediate(kSmiTagMask)); |
2566 __ beq(CMPRES, ZR, &is_smi); | 2605 __ beq(CMPRES1, ZR, &is_smi); |
2567 __ LoadClassId(CMPRES1, value); | 2606 __ LoadClassId(CMPRES1, value); |
2568 __ BranchNotEqual(CMPRES1, kDoubleCid, deopt); | 2607 __ BranchNotEqual(CMPRES1, kDoubleCid, deopt); |
2569 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); | 2608 __ LoadDFromOffset(result, value, Double::value_offset() - kHeapObjectTag); |
2570 __ b(&done); | 2609 __ b(&done); |
2571 __ Bind(&is_smi); | 2610 __ Bind(&is_smi); |
2572 // TODO(regis): Why do we preserve value here but not above? | 2611 // TODO(regis): Why do we preserve value here but not above? |
2573 __ sra(TMP, value, 1); | 2612 __ sra(TMP, value, 1); |
2574 __ mtc1(TMP, STMP1); | 2613 __ mtc1(TMP, STMP1); |
2575 __ cvtdw(result, STMP1); | 2614 __ cvtdw(result, STMP1); |
2576 __ Bind(&done); | 2615 __ Bind(&done); |
(...skipping 437 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3014 } | 3053 } |
3015 | 3054 |
3016 | 3055 |
3017 void UnarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3056 void UnarySmiOpInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
3018 Register value = locs()->in(0).reg(); | 3057 Register value = locs()->in(0).reg(); |
3019 Register result = locs()->out().reg(); | 3058 Register result = locs()->out().reg(); |
3020 switch (op_kind()) { | 3059 switch (op_kind()) { |
3021 case Token::kNEGATE: { | 3060 case Token::kNEGATE: { |
3022 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 3061 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
3023 kDeoptUnaryOp); | 3062 kDeoptUnaryOp); |
3024 __ SubuDetectOverflow(result, ZR, value, CMPRES); | 3063 __ SubuDetectOverflow(result, ZR, value, CMPRES1); |
3025 __ bltz(CMPRES, deopt); | 3064 __ bltz(CMPRES1, deopt); |
3026 break; | 3065 break; |
3027 } | 3066 } |
3028 case Token::kBIT_NOT: | 3067 case Token::kBIT_NOT: |
3029 __ nor(result, value, ZR); | 3068 __ nor(result, value, ZR); |
3030 __ addiu(result, result, Immediate(-1)); // Remove inverted smi-tag. | 3069 __ addiu(result, result, Immediate(-1)); // Remove inverted smi-tag. |
3031 break; | 3070 break; |
3032 default: | 3071 default: |
3033 UNREACHABLE(); | 3072 UNREACHABLE(); |
3034 } | 3073 } |
3035 } | 3074 } |
(...skipping 61 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3097 ASSERT(result == V0); | 3136 ASSERT(result == V0); |
3098 ASSERT(result != value_obj); | 3137 ASSERT(result != value_obj); |
3099 __ LoadDFromOffset(DTMP, value_obj, Double::value_offset() - kHeapObjectTag); | 3138 __ LoadDFromOffset(DTMP, value_obj, Double::value_offset() - kHeapObjectTag); |
3100 __ cvtwd(STMP1, DTMP); | 3139 __ cvtwd(STMP1, DTMP); |
3101 __ mfc1(result, STMP1); | 3140 __ mfc1(result, STMP1); |
3102 | 3141 |
3103 // Overflow is signaled with minint. | 3142 // Overflow is signaled with minint. |
3104 Label do_call, done; | 3143 Label do_call, done; |
3105 // Check for overflow and that it fits into Smi. | 3144 // Check for overflow and that it fits into Smi. |
3106 __ LoadImmediate(TMP, 0xC0000000); | 3145 __ LoadImmediate(TMP, 0xC0000000); |
3107 __ subu(CMPRES, result, TMP); | 3146 __ subu(CMPRES1, result, TMP); |
3108 __ bltz(CMPRES, &do_call); | 3147 __ bltz(CMPRES1, &do_call); |
3109 __ SmiTag(result); | 3148 __ SmiTag(result); |
3110 __ b(&done); | 3149 __ b(&done); |
3111 __ Bind(&do_call); | 3150 __ Bind(&do_call); |
3112 __ Push(value_obj); | 3151 __ Push(value_obj); |
3113 ASSERT(instance_call()->HasICData()); | 3152 ASSERT(instance_call()->HasICData()); |
3114 const ICData& ic_data = *instance_call()->ic_data(); | 3153 const ICData& ic_data = *instance_call()->ic_data(); |
3115 ASSERT((ic_data.NumberOfChecks() == 1)); | 3154 ASSERT((ic_data.NumberOfChecks() == 1)); |
3116 const Function& target = Function::ZoneHandle(ic_data.GetTargetAt(0)); | 3155 const Function& target = Function::ZoneHandle(ic_data.GetTargetAt(0)); |
3117 | 3156 |
3118 const intptr_t kNumberOfArguments = 1; | 3157 const intptr_t kNumberOfArguments = 1; |
(...skipping 20 matching lines...) Expand all Loading... | |
3139 | 3178 |
3140 void DoubleToSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3179 void DoubleToSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
3141 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptDoubleToSmi); | 3180 Label* deopt = compiler->AddDeoptStub(deopt_id(), kDeoptDoubleToSmi); |
3142 Register result = locs()->out().reg(); | 3181 Register result = locs()->out().reg(); |
3143 DRegister value = locs()->in(0).fpu_reg(); | 3182 DRegister value = locs()->in(0).fpu_reg(); |
3144 __ cvtwd(STMP1, value); | 3183 __ cvtwd(STMP1, value); |
3145 __ mfc1(result, STMP1); | 3184 __ mfc1(result, STMP1); |
3146 | 3185 |
3147 // Check for overflow and that it fits into Smi. | 3186 // Check for overflow and that it fits into Smi. |
3148 __ LoadImmediate(TMP, 0xC0000000); | 3187 __ LoadImmediate(TMP, 0xC0000000); |
3149 __ subu(CMPRES, result, TMP); | 3188 __ subu(CMPRES1, result, TMP); |
3150 __ bltz(CMPRES, deopt); | 3189 __ bltz(CMPRES1, deopt); |
3151 __ SmiTag(result); | 3190 __ SmiTag(result); |
3152 } | 3191 } |
3153 | 3192 |
3154 | 3193 |
3155 LocationSummary* DoubleToDoubleInstr::MakeLocationSummary() const { | 3194 LocationSummary* DoubleToDoubleInstr::MakeLocationSummary() const { |
3156 UNIMPLEMENTED(); | 3195 UNIMPLEMENTED(); |
3157 return NULL; | 3196 return NULL; |
3158 } | 3197 } |
3159 | 3198 |
3160 | 3199 |
(...skipping 143 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3304 | 3343 |
3305 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || | 3344 ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) || |
3306 (unary_checks().NumberOfChecks() > 1)); | 3345 (unary_checks().NumberOfChecks() > 1)); |
3307 Register value = locs()->in(0).reg(); | 3346 Register value = locs()->in(0).reg(); |
3308 Register temp = locs()->temp(0).reg(); | 3347 Register temp = locs()->temp(0).reg(); |
3309 Label* deopt = compiler->AddDeoptStub(deopt_id(), | 3348 Label* deopt = compiler->AddDeoptStub(deopt_id(), |
3310 kDeoptCheckClass); | 3349 kDeoptCheckClass); |
3311 Label is_ok; | 3350 Label is_ok; |
3312 intptr_t cix = 0; | 3351 intptr_t cix = 0; |
3313 if (unary_checks().GetReceiverClassIdAt(cix) == kSmiCid) { | 3352 if (unary_checks().GetReceiverClassIdAt(cix) == kSmiCid) { |
3314 __ andi(CMPRES, value, Immediate(kSmiTagMask)); | 3353 __ andi(CMPRES1, value, Immediate(kSmiTagMask)); |
3315 __ beq(CMPRES, ZR, &is_ok); | 3354 __ beq(CMPRES1, ZR, &is_ok); |
3316 cix++; // Skip first check. | 3355 cix++; // Skip first check. |
3317 } else { | 3356 } else { |
3318 __ andi(CMPRES, value, Immediate(kSmiTagMask)); | 3357 __ andi(CMPRES1, value, Immediate(kSmiTagMask)); |
3319 __ beq(CMPRES, ZR, deopt); | 3358 __ beq(CMPRES1, ZR, deopt); |
3320 } | 3359 } |
3321 __ LoadClassId(temp, value); | 3360 __ LoadClassId(temp, value); |
3322 const intptr_t num_checks = unary_checks().NumberOfChecks(); | 3361 const intptr_t num_checks = unary_checks().NumberOfChecks(); |
3323 for (intptr_t i = cix; i < num_checks; i++) { | 3362 for (intptr_t i = cix; i < num_checks; i++) { |
3324 ASSERT(unary_checks().GetReceiverClassIdAt(i) != kSmiCid); | 3363 ASSERT(unary_checks().GetReceiverClassIdAt(i) != kSmiCid); |
3325 __ LoadImmediate(TMP1, unary_checks().GetReceiverClassIdAt(i)); | 3364 __ LoadImmediate(TMP, unary_checks().GetReceiverClassIdAt(i)); |
3326 __ subu(CMPRES, temp, TMP1); | 3365 __ subu(CMPRES1, temp, TMP); |
3327 if (i == (num_checks - 1)) { | 3366 if (i == (num_checks - 1)) { |
3328 __ bne(CMPRES, ZR, deopt); | 3367 __ bne(CMPRES1, ZR, deopt); |
3329 } else { | 3368 } else { |
3330 __ beq(CMPRES, ZR, &is_ok); | 3369 __ beq(CMPRES1, ZR, &is_ok); |
3331 } | 3370 } |
3332 } | 3371 } |
3333 __ Bind(&is_ok); | 3372 __ Bind(&is_ok); |
3334 } | 3373 } |
3335 | 3374 |
3336 | 3375 |
3337 LocationSummary* CheckSmiInstr::MakeLocationSummary() const { | 3376 LocationSummary* CheckSmiInstr::MakeLocationSummary() const { |
3338 const intptr_t kNumInputs = 1; | 3377 const intptr_t kNumInputs = 1; |
3339 const intptr_t kNumTemps = 0; | 3378 const intptr_t kNumTemps = 0; |
3340 LocationSummary* summary = | 3379 LocationSummary* summary = |
(...skipping 226 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3567 bool value) { | 3606 bool value) { |
3568 __ TraceSimMsg("ControlInstruction::EmitBranchOnValue"); | 3607 __ TraceSimMsg("ControlInstruction::EmitBranchOnValue"); |
3569 if (value && !compiler->CanFallThroughTo(true_successor())) { | 3608 if (value && !compiler->CanFallThroughTo(true_successor())) { |
3570 __ b(compiler->GetJumpLabel(true_successor())); | 3609 __ b(compiler->GetJumpLabel(true_successor())); |
3571 } else if (!value && !compiler->CanFallThroughTo(false_successor())) { | 3610 } else if (!value && !compiler->CanFallThroughTo(false_successor())) { |
3572 __ b(compiler->GetJumpLabel(false_successor())); | 3611 __ b(compiler->GetJumpLabel(false_successor())); |
3573 } | 3612 } |
3574 } | 3613 } |
3575 | 3614 |
3576 | 3615 |
3577 // The comparison result is in CMPRES. | 3616 // The comparison result is in CMPRES1. |
3578 void ControlInstruction::EmitBranchOnCondition(FlowGraphCompiler* compiler, | 3617 void ControlInstruction::EmitBranchOnCondition(FlowGraphCompiler* compiler, |
3579 Condition true_condition) { | 3618 Condition true_condition) { |
3580 __ TraceSimMsg("ControlInstruction::EmitBranchOnCondition"); | 3619 __ TraceSimMsg("ControlInstruction::EmitBranchOnCondition"); |
3581 if (compiler->CanFallThroughTo(false_successor())) { | 3620 if (compiler->CanFallThroughTo(false_successor())) { |
3582 // If the next block is the false successor, fall through to it. | 3621 // If the next block is the false successor, fall through to it. |
3583 Label* label = compiler->GetJumpLabel(true_successor()); | 3622 Label* label = compiler->GetJumpLabel(true_successor()); |
3584 EmitBranchAfterCompare(compiler, true_condition, label); | 3623 EmitBranchAfterCompare(compiler, true_condition, label); |
3585 } else { | 3624 } else { |
3586 // If the next block is not the false successor, branch to it. | 3625 // If the next block is not the false successor, branch to it. |
3587 Condition false_condition = NegateCondition(true_condition); | 3626 Condition false_condition = NegateCondition(true_condition); |
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3708 Location::RequiresRegister(), | 3747 Location::RequiresRegister(), |
3709 LocationSummary::kNoCall); | 3748 LocationSummary::kNoCall); |
3710 } | 3749 } |
3711 | 3750 |
3712 | 3751 |
3713 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { | 3752 void BooleanNegateInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
3714 Register value = locs()->in(0).reg(); | 3753 Register value = locs()->in(0).reg(); |
3715 Register result = locs()->out().reg(); | 3754 Register result = locs()->out().reg(); |
3716 | 3755 |
3717 __ LoadObject(result, Bool::True()); | 3756 __ LoadObject(result, Bool::True()); |
3718 __ LoadObject(TMP1, Bool::False()); | 3757 __ LoadObject(TMP, Bool::False()); |
3719 __ subu(CMPRES, value, result); | 3758 __ subu(CMPRES1, value, result); |
3720 __ movz(result, TMP1, CMPRES); // If value is True, move False into result. | 3759 __ movz(result, TMP, CMPRES1); // If value is True, move False into result. |
3721 } | 3760 } |
3722 | 3761 |
3723 | 3762 |
3724 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { | 3763 LocationSummary* StoreVMFieldInstr::MakeLocationSummary() const { |
3725 const intptr_t kNumInputs = 2; | 3764 const intptr_t kNumInputs = 2; |
3726 const intptr_t kNumTemps = 0; | 3765 const intptr_t kNumTemps = 0; |
3727 LocationSummary* locs = | 3766 LocationSummary* locs = |
3728 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); | 3767 new LocationSummary(kNumInputs, kNumTemps, LocationSummary::kNoCall); |
3729 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() | 3768 locs->set_in(0, value()->NeedsStoreBuffer() ? Location::WritableRegister() |
3730 : Location::RequiresRegister()); | 3769 : Location::RequiresRegister()); |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3780 compiler->GenerateCall(token_pos(), | 3819 compiler->GenerateCall(token_pos(), |
3781 &label, | 3820 &label, |
3782 PcDescriptors::kOther, | 3821 PcDescriptors::kOther, |
3783 locs()); | 3822 locs()); |
3784 __ Drop(2); // Discard type arguments and receiver. | 3823 __ Drop(2); // Discard type arguments and receiver. |
3785 } | 3824 } |
3786 | 3825 |
3787 } // namespace dart | 3826 } // namespace dart |
3788 | 3827 |
3789 #endif // defined TARGET_ARCH_MIPS | 3828 #endif // defined TARGET_ARCH_MIPS |
OLD | NEW |