| OLD | NEW |
| 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
| 6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
| 7 | 7 |
| 8 #include "vm/intrinsifier.h" | 8 #include "vm/intrinsifier.h" |
| 9 | 9 |
| 10 #include "vm/assembler.h" | 10 #include "vm/assembler.h" |
| (...skipping 11 matching lines...) |
| 22 | 22 |
| 23 | 23 |
| 24 void Intrinsifier::Array_getLength(Assembler* assembler) { | 24 void Intrinsifier::Array_getLength(Assembler* assembler) { |
| 25 __ lw(V0, Address(SP, 0 * kWordSize)); | 25 __ lw(V0, Address(SP, 0 * kWordSize)); |
| 26 __ Ret(); | 26 __ Ret(); |
| 27 __ delay_slot()->lw(V0, FieldAddress(V0, Array::length_offset())); | 27 __ delay_slot()->lw(V0, FieldAddress(V0, Array::length_offset())); |
| 28 } | 28 } |
| 29 | 29 |
| 30 | 30 |
| 31 void Intrinsifier::ImmutableList_getLength(Assembler* assembler) { | 31 void Intrinsifier::ImmutableList_getLength(Assembler* assembler) { |
| 32 return Array_getLength(assembler); | 32 Array_getLength(assembler); |
| 33 } | 33 } |
| 34 | 34 |
| 35 | 35 |
| 36 void Intrinsifier::Array_getIndexed(Assembler* assembler) { | 36 void Intrinsifier::Array_getIndexed(Assembler* assembler) { |
| 37 Label fall_through; | 37 Label fall_through; |
| 38 | 38 |
| 39 __ lw(T0, Address(SP, + 0 * kWordSize)); // Index | 39 __ lw(T0, Address(SP, + 0 * kWordSize)); // Index |
| 40 | 40 |
| 41 __ andi(CMPRES1, T0, Immediate(kSmiTagMask)); | 41 __ andi(CMPRES1, T0, Immediate(kSmiTagMask)); |
| 42 __ bne(CMPRES1, ZR, &fall_through); // Index is not an smi, fall through | 42 __ bne(CMPRES1, ZR, &fall_through); // Index is not an smi, fall through |
| 43 __ delay_slot()->lw(T1, Address(SP, + 1 * kWordSize)); // Array | 43 __ delay_slot()->lw(T1, Address(SP, + 1 * kWordSize)); // Array |
| 44 | 44 |
| 45 // range check | 45 // range check |
| 46 __ lw(T2, FieldAddress(T1, Array::length_offset())); | 46 __ lw(T2, FieldAddress(T1, Array::length_offset())); |
| 47 __ BranchUnsignedGreaterEqual(T0, T2, &fall_through); | 47 __ BranchUnsignedGreaterEqual(T0, T2, &fall_through); |
| 48 | 48 |
| 49 ASSERT(kSmiTagShift == 1); | 49 ASSERT(kSmiTagShift == 1); |
| 50 // array element at T1 + T0*2 + Array::data_offset - 1 | 50 // array element at T1 + T0*2 + Array::data_offset - 1 |
| 51 __ sll(T2, T0, 1); | 51 __ sll(T2, T0, 1); |
| 52 __ addu(T2, T1, T2); | 52 __ addu(T2, T1, T2); |
| 53 __ Ret(); | 53 __ Ret(); |
| 54 __ delay_slot()->lw(V0, FieldAddress(T2, Array::data_offset())); | 54 __ delay_slot()->lw(V0, FieldAddress(T2, Array::data_offset())); |
| 55 __ Bind(&fall_through); | 55 __ Bind(&fall_through); |
| 56 } | 56 } |
| 57 | 57 |
| 58 | 58 |
| 59 void Intrinsifier::ImmutableList_getIndexed(Assembler* assembler) { | 59 void Intrinsifier::ImmutableList_getIndexed(Assembler* assembler) { |
| 60 return Array_getIndexed(assembler); | 60 Array_getIndexed(assembler); |
| 61 } | 61 } |
| 62 | 62 |
| 63 | 63 |
| 64 static intptr_t ComputeObjectArrayTypeArgumentsOffset() { | 64 static intptr_t ComputeObjectArrayTypeArgumentsOffset() { |
| 65 const Library& core_lib = Library::Handle(Library::CoreLibrary()); | 65 const Library& core_lib = Library::Handle(Library::CoreLibrary()); |
| 66 const Class& cls = Class::Handle( | 66 const Class& cls = Class::Handle( |
| 67 core_lib.LookupClassAllowPrivate(Symbols::_List())); | 67 core_lib.LookupClassAllowPrivate(Symbols::_List())); |
| 68 ASSERT(!cls.IsNull()); | 68 ASSERT(!cls.IsNull()); |
| 69 ASSERT(cls.NumTypeArguments() == 1); | 69 ASSERT(cls.NumTypeArguments() == 1); |
| 70 const intptr_t field_offset = cls.type_arguments_field_offset(); | 70 const intptr_t field_offset = cls.type_arguments_field_offset(); |
| (...skipping 413 matching lines...) |
| 484 | 484 |
| 485 TestBothArgumentsSmis(assembler, &fall_through); // Checks two Smis. | 485 TestBothArgumentsSmis(assembler, &fall_through); // Checks two Smis. |
| 486 __ AdduDetectOverflow(V0, T0, T1, CMPRES1); // Add. | 486 __ AdduDetectOverflow(V0, T0, T1, CMPRES1); // Add. |
| 487 __ bltz(CMPRES1, &fall_through); // Fall through on overflow. | 487 __ bltz(CMPRES1, &fall_through); // Fall through on overflow. |
| 488 __ Ret(); // Nothing in branch delay slot. | 488 __ Ret(); // Nothing in branch delay slot. |
| 489 __ Bind(&fall_through); | 489 __ Bind(&fall_through); |
| 490 } | 490 } |
| 491 | 491 |
| 492 | 492 |
| 493 void Intrinsifier::Integer_add(Assembler* assembler) { | 493 void Intrinsifier::Integer_add(Assembler* assembler) { |
| 494 return Integer_addFromInteger(assembler); | 494 Integer_addFromInteger(assembler); |
| 495 } | 495 } |
| 496 | 496 |
| 497 | 497 |
| 498 void Intrinsifier::Integer_subFromInteger(Assembler* assembler) { | 498 void Intrinsifier::Integer_subFromInteger(Assembler* assembler) { |
| 499 Label fall_through; | 499 Label fall_through; |
| 500 | 500 |
| 501 TestBothArgumentsSmis(assembler, &fall_through); | 501 TestBothArgumentsSmis(assembler, &fall_through); |
| 502 __ SubuDetectOverflow(V0, T0, T1, CMPRES1); // Subtract. | 502 __ SubuDetectOverflow(V0, T0, T1, CMPRES1); // Subtract. |
| 503 __ bltz(CMPRES1, &fall_through); // Fall through on overflow. | 503 __ bltz(CMPRES1, &fall_through); // Fall through on overflow. |
| 504 __ Ret(); | 504 __ Ret(); |
| (...skipping 22 matching lines...) |
| 527 __ mflo(V0); // V0 <- LO. | 527 __ mflo(V0); // V0 <- LO. |
| 528 __ mfhi(T2); // T2 <- HI. | 528 __ mfhi(T2); // T2 <- HI. |
| 529 __ sra(T3, V0, 31); // T3 <- V0 >> 31. | 529 __ sra(T3, V0, 31); // T3 <- V0 >> 31. |
| 530 __ bne(T2, T3, &fall_through); // Fall through on overflow. | 530 __ bne(T2, T3, &fall_through); // Fall through on overflow. |
| 531 __ Ret(); | 531 __ Ret(); |
| 532 __ Bind(&fall_through); | 532 __ Bind(&fall_through); |
| 533 } | 533 } |
| 534 | 534 |
| 535 | 535 |
| 536 void Intrinsifier::Integer_mul(Assembler* assembler) { | 536 void Intrinsifier::Integer_mul(Assembler* assembler) { |
| 537 return Integer_mulFromInteger(assembler); | 537 Integer_mulFromInteger(assembler); |
| 538 } | 538 } |
| 539 | 539 |
| 540 | 540 |
| 541 // Optimizations: | 541 // Optimizations: |
| 542 // - result is 0 if: | 542 // - result is 0 if: |
| 543 // - left is 0 | 543 // - left is 0 |
| 544 // - left equals right | 544 // - left equals right |
| 545 // - result is left if | 545 // - result is left if |
| 546 // - left > 0 && left < right | 546 // - left > 0 && left < right |
| 547 // T1: Tagged left (dividend). | 547 // T1: Tagged left (dividend). |
| (...skipping 108 matching lines...) |
| 656 Label fall_through; | 656 Label fall_through; |
| 657 | 657 |
| 658 TestBothArgumentsSmis(assembler, &fall_through); // Checks two smis. | 658 TestBothArgumentsSmis(assembler, &fall_through); // Checks two smis. |
| 659 __ Ret(); | 659 __ Ret(); |
| 660 __ delay_slot()->and_(V0, T0, T1); | 660 __ delay_slot()->and_(V0, T0, T1); |
| 661 __ Bind(&fall_through); | 661 __ Bind(&fall_through); |
| 662 } | 662 } |
| 663 | 663 |
| 664 | 664 |
| 665 void Intrinsifier::Integer_bitAnd(Assembler* assembler) { | 665 void Intrinsifier::Integer_bitAnd(Assembler* assembler) { |
| 666 return Integer_bitAndFromInteger(assembler); | 666 Integer_bitAndFromInteger(assembler); |
| 667 } | 667 } |
| 668 | 668 |
| 669 | 669 |
| 670 void Intrinsifier::Integer_bitOrFromInteger(Assembler* assembler) { | 670 void Intrinsifier::Integer_bitOrFromInteger(Assembler* assembler) { |
| 671 Label fall_through; | 671 Label fall_through; |
| 672 | 672 |
| 673 TestBothArgumentsSmis(assembler, &fall_through); // Checks two smis. | 673 TestBothArgumentsSmis(assembler, &fall_through); // Checks two smis. |
| 674 __ Ret(); | 674 __ Ret(); |
| 675 __ delay_slot()->or_(V0, T0, T1); | 675 __ delay_slot()->or_(V0, T0, T1); |
| 676 __ Bind(&fall_through); | 676 __ Bind(&fall_through); |
| 677 } | 677 } |
| 678 | 678 |
| 679 | 679 |
| 680 void Intrinsifier::Integer_bitOr(Assembler* assembler) { | 680 void Intrinsifier::Integer_bitOr(Assembler* assembler) { |
| 681 return Integer_bitOrFromInteger(assembler); | 681 Integer_bitOrFromInteger(assembler); |
| 682 } | 682 } |
| 683 | 683 |
| 684 | 684 |
| 685 void Intrinsifier::Integer_bitXorFromInteger(Assembler* assembler) { | 685 void Intrinsifier::Integer_bitXorFromInteger(Assembler* assembler) { |
| 686 Label fall_through; | 686 Label fall_through; |
| 687 | 687 |
| 688 TestBothArgumentsSmis(assembler, &fall_through); // Checks two smis. | 688 TestBothArgumentsSmis(assembler, &fall_through); // Checks two smis. |
| 689 __ Ret(); | 689 __ Ret(); |
| 690 __ delay_slot()->xor_(V0, T0, T1); | 690 __ delay_slot()->xor_(V0, T0, T1); |
| 691 __ Bind(&fall_through); | 691 __ Bind(&fall_through); |
| 692 } | 692 } |
| 693 | 693 |
| 694 | 694 |
| 695 void Intrinsifier::Integer_bitXor(Assembler* assembler) { | 695 void Intrinsifier::Integer_bitXor(Assembler* assembler) { |
| 696 return Integer_bitXorFromInteger(assembler); | 696 Integer_bitXorFromInteger(assembler); |
| 697 } | 697 } |
| 698 | 698 |
| 699 | 699 |
| 700 void Intrinsifier::Integer_shl(Assembler* assembler) { | 700 void Intrinsifier::Integer_shl(Assembler* assembler) { |
| 701 ASSERT(kSmiTagShift == 1); | 701 ASSERT(kSmiTagShift == 1); |
| 702 ASSERT(kSmiTag == 0); | 702 ASSERT(kSmiTag == 0); |
| 703 Label fall_through, overflow; | 703 Label fall_through, overflow; |
| 704 | 704 |
| 705 TestBothArgumentsSmis(assembler, &fall_through); | 705 TestBothArgumentsSmis(assembler, &fall_through); |
| 706 __ BranchUnsignedGreater(T0, Smi::RawValue(Smi::kBits), &fall_through); | 706 __ BranchUnsignedGreater(T0, Smi::RawValue(Smi::kBits), &fall_through); |
| (...skipping 133 matching lines...) |
| 840 break; | 840 break; |
| 841 } | 841 } |
| 842 // Else is true. | 842 // Else is true. |
| 843 __ b(&is_true); | 843 __ b(&is_true); |
| 844 | 844 |
| 845 __ Bind(&fall_through); | 845 __ Bind(&fall_through); |
| 846 } | 846 } |
| 847 | 847 |
| 848 | 848 |
| 849 void Intrinsifier::Integer_greaterThanFromInt(Assembler* assembler) { | 849 void Intrinsifier::Integer_greaterThanFromInt(Assembler* assembler) { |
| 850 return CompareIntegers(assembler, LT); | 850 CompareIntegers(assembler, LT); |
| 851 } | 851 } |
| 852 | 852 |
| 853 | 853 |
| 854 void Intrinsifier::Integer_lessThan(Assembler* assembler) { | 854 void Intrinsifier::Integer_lessThan(Assembler* assembler) { |
| 855 return Integer_greaterThanFromInt(assembler); | 855 Integer_greaterThanFromInt(assembler); |
| 856 } | 856 } |
| 857 | 857 |
| 858 | 858 |
| 859 void Intrinsifier::Integer_greaterThan(Assembler* assembler) { | 859 void Intrinsifier::Integer_greaterThan(Assembler* assembler) { |
| 860 return CompareIntegers(assembler, GT); | 860 CompareIntegers(assembler, GT); |
| 861 } | 861 } |
| 862 | 862 |
| 863 | 863 |
| 864 void Intrinsifier::Integer_lessEqualThan(Assembler* assembler) { | 864 void Intrinsifier::Integer_lessEqualThan(Assembler* assembler) { |
| 865 return CompareIntegers(assembler, LE); | 865 CompareIntegers(assembler, LE); |
| 866 } | 866 } |
| 867 | 867 |
| 868 | 868 |
| 869 void Intrinsifier::Integer_greaterEqualThan(Assembler* assembler) { | 869 void Intrinsifier::Integer_greaterEqualThan(Assembler* assembler) { |
| 870 return CompareIntegers(assembler, GE); | 870 CompareIntegers(assembler, GE); |
| 871 } | 871 } |
| 872 | 872 |
| 873 | 873 |
| 874 // This is called for Smi, Mint and Bigint receivers. The right argument | 874 // This is called for Smi, Mint and Bigint receivers. The right argument |
| 875 // can be Smi, Mint, Bigint or double. | 875 // can be Smi, Mint, Bigint or double. |
| 876 void Intrinsifier::Integer_equalToInteger(Assembler* assembler) { | 876 void Intrinsifier::Integer_equalToInteger(Assembler* assembler) { |
| 877 Label fall_through, true_label, check_for_mint; | 877 Label fall_through, true_label, check_for_mint; |
| 878 // For integer receiver '===' check first. | 878 // For integer receiver '===' check first. |
| 879 __ lw(T0, Address(SP, 0 * kWordSize)); | 879 __ lw(T0, Address(SP, 0 * kWordSize)); |
| 880 __ lw(T1, Address(SP, 1 * kWordSize)); | 880 __ lw(T1, Address(SP, 1 * kWordSize)); |
| (...skipping 37 matching lines...) |
| 918 __ bne(CMPRES1, ZR, &fall_through); | 918 __ bne(CMPRES1, ZR, &fall_through); |
| 919 __ LoadObject(V0, Bool::False()); | 919 __ LoadObject(V0, Bool::False()); |
| 920 __ Ret(); | 920 __ Ret(); |
| 921 // TODO(srdjan): Implement Mint == Mint comparison. | 921 // TODO(srdjan): Implement Mint == Mint comparison. |
| 922 | 922 |
| 923 __ Bind(&fall_through); | 923 __ Bind(&fall_through); |
| 924 } | 924 } |
| 925 | 925 |
| 926 | 926 |
| 927 void Intrinsifier::Integer_equal(Assembler* assembler) { | 927 void Intrinsifier::Integer_equal(Assembler* assembler) { |
| 928 return Integer_equalToInteger(assembler); | 928 Integer_equalToInteger(assembler); |
| 929 } | 929 } |
| 930 | 930 |
| 931 | 931 |
| 932 void Intrinsifier::Integer_sar(Assembler* assembler) { | 932 void Intrinsifier::Integer_sar(Assembler* assembler) { |
| 933 Label fall_through; | 933 Label fall_through; |
| 934 | 934 |
| 935 TestBothArgumentsSmis(assembler, &fall_through); | 935 TestBothArgumentsSmis(assembler, &fall_through); |
| 936 // Shift amount in T0. Value to shift in T1. | 936 // Shift amount in T0. Value to shift in T1. |
| 937 | 937 |
| 938 __ SmiUntag(T0); | 938 __ SmiUntag(T0); |
| (...skipping 87 matching lines...) |
| 1026 __ SmiUntag(T0); | 1026 __ SmiUntag(T0); |
| 1027 __ mtc1(T0, STMP1); | 1027 __ mtc1(T0, STMP1); |
| 1028 __ cvtdw(D1, STMP1); | 1028 __ cvtdw(D1, STMP1); |
| 1029 __ b(&double_op); | 1029 __ b(&double_op); |
| 1030 | 1030 |
| 1031 __ Bind(&fall_through); | 1031 __ Bind(&fall_through); |
| 1032 } | 1032 } |
| 1033 | 1033 |
| 1034 | 1034 |
| 1035 void Intrinsifier::Double_greaterThan(Assembler* assembler) { | 1035 void Intrinsifier::Double_greaterThan(Assembler* assembler) { |
| 1036 return CompareDoubles(assembler, GT); | 1036 CompareDoubles(assembler, GT); |
| 1037 } | 1037 } |
| 1038 | 1038 |
| 1039 | 1039 |
| 1040 void Intrinsifier::Double_greaterEqualThan(Assembler* assembler) { | 1040 void Intrinsifier::Double_greaterEqualThan(Assembler* assembler) { |
| 1041 return CompareDoubles(assembler, GE); | 1041 CompareDoubles(assembler, GE); |
| 1042 } | 1042 } |
| 1043 | 1043 |
| 1044 | 1044 |
| 1045 void Intrinsifier::Double_lessThan(Assembler* assembler) { | 1045 void Intrinsifier::Double_lessThan(Assembler* assembler) { |
| 1046 return CompareDoubles(assembler, LT); | 1046 CompareDoubles(assembler, LT); |
| 1047 } | 1047 } |
| 1048 | 1048 |
| 1049 | 1049 |
| 1050 void Intrinsifier::Double_equal(Assembler* assembler) { | 1050 void Intrinsifier::Double_equal(Assembler* assembler) { |
| 1051 return CompareDoubles(assembler, EQ); | 1051 CompareDoubles(assembler, EQ); |
| 1052 } | 1052 } |
| 1053 | 1053 |
| 1054 | 1054 |
| 1055 void Intrinsifier::Double_lessEqualThan(Assembler* assembler) { | 1055 void Intrinsifier::Double_lessEqualThan(Assembler* assembler) { |
| 1056 return CompareDoubles(assembler, LE); | 1056 CompareDoubles(assembler, LE); |
| 1057 } | 1057 } |
| 1058 | 1058 |
| 1059 | 1059 |
| 1060 // Expects left argument to be double (receiver). Right argument is unknown. | 1060 // Expects left argument to be double (receiver). Right argument is unknown. |
| 1061 // Both arguments are on stack. | 1061 // Both arguments are on stack. |
| 1062 static void DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) { | 1062 static void DoubleArithmeticOperations(Assembler* assembler, Token::Kind kind) { |
| 1063 Label fall_through; | 1063 Label fall_through; |
| 1064 | 1064 |
| 1065 TestLastArgumentIsDouble(assembler, &fall_through, &fall_through); | 1065 TestLastArgumentIsDouble(assembler, &fall_through, &fall_through); |
| 1066 // Both arguments are double, right operand is in T0. | 1066 // Both arguments are double, right operand is in T0. |
| (...skipping 14 matching lines...) |
| 1081 __ TryAllocate(double_class, &fall_through, V0, T1); // Result register. | 1081 __ TryAllocate(double_class, &fall_through, V0, T1); // Result register. |
| 1082 __ swc1(F0, FieldAddress(V0, Double::value_offset())); | 1082 __ swc1(F0, FieldAddress(V0, Double::value_offset())); |
| 1083 __ Ret(); | 1083 __ Ret(); |
| 1084 __ delay_slot()->swc1(F1, | 1084 __ delay_slot()->swc1(F1, |
| 1085 FieldAddress(V0, Double::value_offset() + kWordSize)); | 1085 FieldAddress(V0, Double::value_offset() + kWordSize)); |
| 1086 __ Bind(&fall_through); | 1086 __ Bind(&fall_through); |
| 1087 } | 1087 } |
| 1088 | 1088 |
| 1089 | 1089 |
| 1090 void Intrinsifier::Double_add(Assembler* assembler) { | 1090 void Intrinsifier::Double_add(Assembler* assembler) { |
| 1091 return DoubleArithmeticOperations(assembler, Token::kADD); | 1091 DoubleArithmeticOperations(assembler, Token::kADD); |
| 1092 } | 1092 } |
| 1093 | 1093 |
| 1094 | 1094 |
| 1095 void Intrinsifier::Double_mul(Assembler* assembler) { | 1095 void Intrinsifier::Double_mul(Assembler* assembler) { |
| 1096 return DoubleArithmeticOperations(assembler, Token::kMUL); | 1096 DoubleArithmeticOperations(assembler, Token::kMUL); |
| 1097 } | 1097 } |
| 1098 | 1098 |
| 1099 | 1099 |
| 1100 void Intrinsifier::Double_sub(Assembler* assembler) { | 1100 void Intrinsifier::Double_sub(Assembler* assembler) { |
| 1101 return DoubleArithmeticOperations(assembler, Token::kSUB); | 1101 DoubleArithmeticOperations(assembler, Token::kSUB); |
| 1102 } | 1102 } |
| 1103 | 1103 |
| 1104 | 1104 |
| 1105 void Intrinsifier::Double_div(Assembler* assembler) { | 1105 void Intrinsifier::Double_div(Assembler* assembler) { |
| 1106 return DoubleArithmeticOperations(assembler, Token::kDIV); | 1106 DoubleArithmeticOperations(assembler, Token::kDIV); |
| 1107 } | 1107 } |
| 1108 | 1108 |
| 1109 | 1109 |
| 1110 // Left is double right is integer (Bigint, Mint or Smi) | 1110 // Left is double right is integer (Bigint, Mint or Smi) |
| 1111 void Intrinsifier::Double_mulFromInteger(Assembler* assembler) { | 1111 void Intrinsifier::Double_mulFromInteger(Assembler* assembler) { |
| 1112 Label fall_through; | 1112 Label fall_through; |
| 1113 // Only smis allowed. | 1113 // Only smis allowed. |
| 1114 __ lw(T0, Address(SP, 0 * kWordSize)); | 1114 __ lw(T0, Address(SP, 0 * kWordSize)); |
| 1115 __ andi(CMPRES1, T0, Immediate(kSmiTagMask)); | 1115 __ andi(CMPRES1, T0, Immediate(kSmiTagMask)); |
| 1116 __ bne(CMPRES1, ZR, &fall_through); | 1116 __ bne(CMPRES1, ZR, &fall_through); |
| (...skipping 584 matching lines...) |
| 1701 Isolate* isolate = Isolate::Current(); | 1701 Isolate* isolate = Isolate::Current(); |
| 1702 __ LoadImmediate(V0, reinterpret_cast<uword>(isolate)); | 1702 __ LoadImmediate(V0, reinterpret_cast<uword>(isolate)); |
| 1703 // Set return value. | 1703 // Set return value. |
| 1704 __ Ret(); | 1704 __ Ret(); |
| 1705 __ delay_slot()->lw(V0, Address(V0, Isolate::current_tag_offset())); | 1705 __ delay_slot()->lw(V0, Address(V0, Isolate::current_tag_offset())); |
| 1706 } | 1706 } |
| 1707 | 1707 |
| 1708 } // namespace dart | 1708 } // namespace dart |
| 1709 | 1709 |
| 1710 #endif // defined TARGET_ARCH_MIPS | 1710 #endif // defined TARGET_ARCH_MIPS |
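
Two conventions recur throughout the diff above: Smi tagging (the asserts `kSmiTagShift == 1` and `kSmiTag == 0` mean a Smi is its value shifted left one bit, with a zero low bit) and the MIPS branch delay slot (the instruction issued via `delay_slot()` executes together with the preceding `Ret()` or branch, saving one instruction per intrinsic). The sketch below is a minimal, host-side illustration of the tagging arithmetic behind `Array_getIndexed`; the helper names, the example `data_offset` of 12, and the `kHeapObjectTag` constant are assumptions for illustration (the `- 1` in the intrinsic's own comment suggests the heap-object tag), not VM code.

```cpp
// Minimal sketch (not VM code): Smi tagging and the element-address
// computation mirrored from Array_getIndexed above.
#include <cassert>
#include <cstdint>
#include <cstdio>

using std::intptr_t;

constexpr intptr_t kWordSize = 4;       // MIPS32 word size
constexpr intptr_t kSmiTagShift = 1;    // asserted in the intrinsic
constexpr intptr_t kSmiTagMask = 1;
constexpr intptr_t kHeapObjectTag = 1;  // assumed: the "- 1" in the comment above

intptr_t SmiTag(intptr_t value) { return value << kSmiTagShift; }
intptr_t SmiUntag(intptr_t raw) { return raw >> kSmiTagShift; }
bool IsSmi(intptr_t raw) { return (raw & kSmiTagMask) == 0; }  // andi + bne check

// Mirrors: sll(T2, T0, 1); addu(T2, T1, T2); lw(V0, FieldAddress(T2, data_offset)).
// The tagged index is already value * 2, so one more left shift scales it to a
// 4-byte word offset; FieldAddress then subtracts the heap-object tag.
intptr_t ElementAddress(intptr_t tagged_array, intptr_t tagged_index,
                        intptr_t data_offset) {
  intptr_t scaled = tagged_index << 1;  // == SmiUntag(index) * kWordSize
  return tagged_array + scaled + data_offset - kHeapObjectTag;
}

int main() {
  intptr_t index = SmiTag(3);
  assert(IsSmi(index) && SmiUntag(index) == 3);
  // Hypothetical numbers: array object at 0x1000 (so the tagged pointer is
  // 0x1001) and a data_offset of 12.
  intptr_t addr = ElementAddress(0x1001, index, 12);
  assert(addr == 0x1000 + 12 + 3 * kWordSize);
  std::printf("element 3 at 0x%lx\n", static_cast<unsigned long>(addr));
  return 0;
}
```

Scaling the index with a single extra left shift works only because it arrives pre-multiplied by two through its Smi tag, which is why the intrinsic asserts `kSmiTagShift == 1` instead of untagging first.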