OLD | NEW |
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file |
2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
4 | 4 |
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_MIPS. |
6 #if defined(TARGET_ARCH_MIPS) | 6 #if defined(TARGET_ARCH_MIPS) |
7 | 7 |
8 #include "vm/intrinsifier.h" | 8 #include "vm/intrinsifier.h" |
9 | 9 |
10 #include "vm/assembler.h" | 10 #include "vm/assembler.h" |
(...skipping 91 matching lines...)
102 __ StoreIntoObjectNoBarrier(T0, | 102 __ StoreIntoObjectNoBarrier(T0, |
103 FieldAddress(T0, Array::type_arguments_offset()), | 103 FieldAddress(T0, Array::type_arguments_offset()), |
104 T2); | 104 T2); |
105 | 105 |
106 // Set the length field. | 106 // Set the length field. |
107 __ lw(T2, Address(SP, kArrayLengthOffset)); // Array Length. | 107 __ lw(T2, Address(SP, kArrayLengthOffset)); // Array Length. |
108 __ StoreIntoObjectNoBarrier(T0, | 108 __ StoreIntoObjectNoBarrier(T0, |
109 FieldAddress(T0, Array::length_offset()), | 109 FieldAddress(T0, Array::length_offset()), |
110 T2); | 110 T2); |
111 | 111 |
| 112 __ LoadImmediate(T7, reinterpret_cast<int32_t>(Object::null())); |
112 // Initialize all array elements to raw_null. | 113 // Initialize all array elements to raw_null. |
113 // T0: new object start as a tagged pointer. | 114 // T0: new object start as a tagged pointer. |
114 // T1: new object end address. | 115 // T1: new object end address. |
115 // T2: iterator which initially points to the start of the variable | 116 // T2: iterator which initially points to the start of the variable |
116 // data area to be initialized. | 117 // data area to be initialized. |
117 // NULLREG: null | 118 // T7: null |
118 __ AddImmediate(T2, T0, sizeof(RawArray) - kHeapObjectTag); | 119 __ AddImmediate(T2, T0, sizeof(RawArray) - kHeapObjectTag); |
119 | 120 |
120 Label done; | 121 Label done; |
121 Label init_loop; | 122 Label init_loop; |
122 __ Bind(&init_loop); | 123 __ Bind(&init_loop); |
123 __ BranchUnsignedGreaterEqual(T2, T1, &done); | 124 __ BranchUnsignedGreaterEqual(T2, T1, &done); |
124 __ sw(NULLREG, Address(T2, 0)); | 125 __ sw(T7, Address(T2, 0)); |
125 __ b(&init_loop); | 126 __ b(&init_loop); |
126 __ delay_slot()->addiu(T2, T2, Immediate(kWordSize)); | 127 __ delay_slot()->addiu(T2, T2, Immediate(kWordSize)); |
127 __ Bind(&done); | 128 __ Bind(&done); |
128 | 129 |
129 __ Ret(); // Returns the newly allocated object in V0. | 130 __ Ret(); // Returns the newly allocated object in V0. |
130 __ delay_slot()->mov(V0, T0); | 131 __ delay_slot()->mov(V0, T0); |
131 __ Bind(&fall_through); | 132 __ Bind(&fall_through); |
132 return false; | 133 return false; |
133 } | 134 } |
134 | 135 |
(...skipping 59 matching lines...)
194 Label fall_through; | 195 Label fall_through; |
195 | 196 |
196 if (FLAG_enable_type_checks) { | 197 if (FLAG_enable_type_checks) { |
197 const intptr_t type_args_field_offset = | 198 const intptr_t type_args_field_offset = |
198 ComputeObjectArrayTypeArgumentsOffset(); | 199 ComputeObjectArrayTypeArgumentsOffset(); |
199 // Inline simple tests (Smi, null), fallthrough if not positive. | 200 // Inline simple tests (Smi, null), fallthrough if not positive. |
200 Label checked_ok; | 201 Label checked_ok; |
201 __ lw(T2, Address(SP, 0 * kWordSize)); // Value. | 202 __ lw(T2, Address(SP, 0 * kWordSize)); // Value. |
202 | 203 |
203 // Null value is valid for any type. | 204 // Null value is valid for any type. |
204 __ beq(T2, NULLREG, &checked_ok); | 205 __ LoadImmediate(T7, reinterpret_cast<int32_t>(Object::null())); |
| 206 __ beq(T2, T7, &checked_ok); |
205 __ delay_slot()->lw(T1, Address(SP, 2 * kWordSize)); // Array. | 207 __ delay_slot()->lw(T1, Address(SP, 2 * kWordSize)); // Array. |
206 | 208 |
207 __ lw(T1, FieldAddress(T1, type_args_field_offset)); | 209 __ lw(T1, FieldAddress(T1, type_args_field_offset)); |
208 | 210 |
209 // T1: Type arguments of array. | 211 // T1: Type arguments of array. |
210 __ beq(T1, NULLREG, &checked_ok); | 212 __ beq(T1, T7, &checked_ok); |
211 | 213 |
212 // Check if it's dynamic. | 214 // Check if it's dynamic. |
213 // For now handle only TypeArguments and bail out if InstantiatedTypeArgs. | 215 // For now handle only TypeArguments and bail out if InstantiatedTypeArgs. |
214 __ LoadClassId(TMP, T1); | 216 __ LoadClassId(TMP, T1); |
215 __ BranchNotEqual(TMP, kTypeArgumentsCid, &fall_through); | 217 __ BranchNotEqual(TMP, kTypeArgumentsCid, &fall_through); |
216 | 218 |
217 // Get type at index 0. | 219 // Get type at index 0. |
218 __ lw(T0, FieldAddress(T1, TypeArguments::type_at_offset(0))); | 220 __ lw(T0, FieldAddress(T1, TypeArguments::type_at_offset(0))); |
219 __ BranchEqual(T0, Type::ZoneHandle(Type::DynamicType()), &checked_ok); | 221 __ BranchEqual(T0, Type::ZoneHandle(Type::DynamicType()), &checked_ok); |
220 | 222 |
(...skipping 234 matching lines...)
455 // len = len + 1; | 457 // len = len + 1; |
456 __ addiu(T3, T1, Immediate(value_one)); | 458 __ addiu(T3, T1, Immediate(value_one)); |
457 __ sw(T3, FieldAddress(T0, GrowableObjectArray::length_offset())); | 459 __ sw(T3, FieldAddress(T0, GrowableObjectArray::length_offset())); |
458 __ lw(T0, Address(SP, 0 * kWordSize)); // Value. | 460 __ lw(T0, Address(SP, 0 * kWordSize)); // Value. |
459 ASSERT(kSmiTagShift == 1); | 461 ASSERT(kSmiTagShift == 1); |
460 __ sll(T1, T1, 1); | 462 __ sll(T1, T1, 1); |
461 __ addu(T1, T2, T1); | 463 __ addu(T1, T2, T1); |
462 __ StoreIntoObject(T2, | 464 __ StoreIntoObject(T2, |
463 FieldAddress(T1, Array::data_offset()), | 465 FieldAddress(T1, Array::data_offset()), |
464 T0); | 466 T0); |
| 467 __ LoadImmediate(T7, reinterpret_cast<int32_t>(Object::null())); |
465 __ Ret(); | 468 __ Ret(); |
466 __ delay_slot()->mov(V0, NULLREG); | 469 __ delay_slot()->mov(V0, T7); |
467 __ Bind(&fall_through); | 470 __ Bind(&fall_through); |
468 return false; | 471 return false; |
469 } | 472 } |
470 | 473 |
471 | 474 |
472 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift) \ | 475 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift) \ |
473 Label fall_through; \ | 476 Label fall_through; \ |
474 const intptr_t kArrayLengthStackOffset = 0 * kWordSize; \ | 477 const intptr_t kArrayLengthStackOffset = 0 * kWordSize; \ |
475 __ lw(T2, Address(SP, kArrayLengthStackOffset)); /* Array length. */ \ | 478 __ lw(T2, Address(SP, kArrayLengthStackOffset)); /* Array length. */ \ |
476 /* Check that length is a positive Smi. */ \ | 479 /* Check that length is a positive Smi. */ \ |
(...skipping 352 matching lines...)
829 } | 832 } |
830 | 833 |
831 | 834 |
832 bool Intrinsifier::Integer_bitOr(Assembler* assembler) { | 835 bool Intrinsifier::Integer_bitOr(Assembler* assembler) { |
833 return Integer_bitOrFromInteger(assembler); | 836 return Integer_bitOrFromInteger(assembler); |
834 } | 837 } |
835 | 838 |
836 | 839 |
837 bool Intrinsifier::Integer_bitXorFromInteger(Assembler* assembler) { | 840 bool Intrinsifier::Integer_bitXorFromInteger(Assembler* assembler) { |
838 Label fall_through; | 841 Label fall_through; |
839 __ Untested("Intrinsifier::Integer_bitXorFromInteger"); | |
840 | 842 |
841 TestBothArgumentsSmis(assembler, &fall_through); // Checks two smis. | 843 TestBothArgumentsSmis(assembler, &fall_through); // Checks two smis. |
842 __ Ret(); | 844 __ Ret(); |
843 __ delay_slot()->xor_(V0, T0, T1); | 845 __ delay_slot()->xor_(V0, T0, T1); |
844 __ Bind(&fall_through); | 846 __ Bind(&fall_through); |
845 return false; | 847 return false; |
846 } | 848 } |
847 | 849 |
848 | 850 |
849 bool Intrinsifier::Integer_bitXor(Assembler* assembler) { | 851 bool Intrinsifier::Integer_bitXor(Assembler* assembler) { |
(...skipping 281 matching lines...)
1131 __ BranchNotEqual(TMP, kDoubleCid, not_double_smi); | 1133 __ BranchNotEqual(TMP, kDoubleCid, not_double_smi); |
1132 // Fall through with Double in T0. | 1134 // Fall through with Double in T0. |
1133 } | 1135 } |
1134 | 1136 |
1135 | 1137 |
1136 // Both arguments on stack, arg0 (left) is a double, arg1 (right) is of unknown | 1138 // Both arguments on stack, arg0 (left) is a double, arg1 (right) is of unknown |
1137 // type. Return true or false object in the register V0. Any NaN argument | 1139 // type. Return true or false object in the register V0. Any NaN argument |
1138 // returns false. Any non-double arg1 causes control flow to fall through to the | 1140 // returns false. Any non-double arg1 causes control flow to fall through to the |
1139 // slow case (compiled method body). | 1141 // slow case (compiled method body). |
1140 static bool CompareDoubles(Assembler* assembler, Condition true_condition) { | 1142 static bool CompareDoubles(Assembler* assembler, Condition true_condition) { |
1141 Label is_smi, no_conversion, no_NaN, fall_through; | 1143 Label is_smi, double_op, no_NaN, fall_through; |
| 1144 __ Comment("CompareDoubles Intrinsic"); |
1142 | 1145 |
1143 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); | 1146 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); |
1144 // Both arguments are double, right operand is in T0. | 1147 // Both arguments are double, right operand is in T0. |
1145 __ lwc1(F2, FieldAddress(T0, Double::value_offset())); | 1148 __ LoadDFromOffset(D1, T0, Double::value_offset() - kHeapObjectTag); |
1146 __ b(&no_conversion); | 1149 __ Bind(&double_op); |
1147 __ delay_slot()->lwc1(F3, | |
1148 FieldAddress(T0, Double::value_offset() + kWordSize)); | |
1149 | |
1150 __ Bind(&is_smi); | |
1151 __ SmiUntag(T0); | |
1152 __ mtc1(T0, F4); | |
1153 __ cvtdw(D1, F4); | |
1154 | |
1155 __ Bind(&no_conversion); | |
1156 __ lw(T0, Address(SP, 1 * kWordSize)); // Left argument. | 1150 __ lw(T0, Address(SP, 1 * kWordSize)); // Left argument. |
1157 __ lwc1(F0, FieldAddress(T0, Double::value_offset())); | 1151 __ LoadDFromOffset(D0, T0, Double::value_offset() - kHeapObjectTag); |
1158 __ lwc1(F1, FieldAddress(T0, Double::value_offset() + kWordSize)); | |
1159 // Now, left is in D0, right is in D1. | 1152 // Now, left is in D0, right is in D1. |
1160 | 1153 |
1161 __ cund(D0, D1); // Check for NaN. | 1154 __ cund(D0, D1); // Check for NaN. |
1162 __ bc1f(&no_NaN); | 1155 __ bc1f(&no_NaN); |
1163 __ LoadObject(V0, Bool::False()); // Return false if either is NaN. | 1156 __ LoadObject(V0, Bool::False()); // Return false if either is NaN. |
1164 __ Ret(); | 1157 __ Ret(); |
1165 __ Bind(&no_NaN); | 1158 __ Bind(&no_NaN); |
1166 | 1159 |
1167 switch (true_condition) { | 1160 switch (true_condition) { |
1168 case EQ: __ ceqd(D1, D0); break; | 1161 case EQ: __ ceqd(D0, D1); break; |
1169 case LT: __ coltd(D1, D0); break; | 1162 case LT: __ coltd(D0, D1); break; |
1170 case LE: __ coled(D1, D0); break; | 1163 case LE: __ coled(D0, D1); break; |
1171 case GT: __ coltd(D0, D1); break; | 1164 case GT: __ coltd(D1, D0); break; |
1172 case GE: __ coled(D0, D1); break; | 1165 case GE: __ coled(D1, D0); break; |
1173 default: { | 1166 default: { |
1174 // Only passing the above conditions to this function. | 1167 // Only passing the above conditions to this function. |
1175 UNREACHABLE(); | 1168 UNREACHABLE(); |
1176 break; | 1169 break; |
1177 } | 1170 } |
1178 } | 1171 } |
1179 | 1172 |
1180 Label is_true; | 1173 Label is_true; |
1181 __ bc1t(&is_true); | 1174 __ bc1t(&is_true); |
1182 __ LoadObject(V0, Bool::False()); | 1175 __ LoadObject(V0, Bool::False()); |
1183 __ Ret(); | 1176 __ Ret(); |
1184 __ Bind(&is_true); | 1177 __ Bind(&is_true); |
1185 __ LoadObject(V0, Bool::True()); | 1178 __ LoadObject(V0, Bool::True()); |
1186 __ Ret(); | 1179 __ Ret(); |
| 1180 |
| 1181 |
| 1182 __ Bind(&is_smi); |
| 1183 __ SmiUntag(T0); |
| 1184 __ mtc1(T0, STMP1); |
| 1185 __ cvtdw(D1, STMP1); |
| 1186 __ b(&double_op); |
| 1187 |
1187 __ Bind(&fall_through); | 1188 __ Bind(&fall_through); |
1188 return false; | 1189 return false; |
1189 } | 1190 } |
1190 | 1191 |
1191 | 1192 |
1192 bool Intrinsifier::Double_greaterThan(Assembler* assembler) { | 1193 bool Intrinsifier::Double_greaterThan(Assembler* assembler) { |
1193 return CompareDoubles(assembler, GT); | 1194 return CompareDoubles(assembler, GT); |
1194 } | 1195 } |
1195 | 1196 |
1196 | 1197 |
(...skipping 64 matching lines...)
1261 | 1262 |
1262 | 1263 |
1263 bool Intrinsifier::Double_div(Assembler* assembler) { | 1264 bool Intrinsifier::Double_div(Assembler* assembler) { |
1264 return DoubleArithmeticOperations(assembler, Token::kDIV); | 1265 return DoubleArithmeticOperations(assembler, Token::kDIV); |
1265 } | 1266 } |
1266 | 1267 |
1267 | 1268 |
1268 // Left is double right is integer (Bigint, Mint or Smi) | 1269 // Left is double right is integer (Bigint, Mint or Smi) |
1269 bool Intrinsifier::Double_mulFromInteger(Assembler* assembler) { | 1270 bool Intrinsifier::Double_mulFromInteger(Assembler* assembler) { |
1270 Label fall_through; | 1271 Label fall_through; |
1271 __ Untested("Intrinsifier::Double_mulFromInteger"); | |
1272 // Only Smi-s allowed. | 1272 // Only Smi-s allowed. |
1273 __ lw(T0, Address(SP, 0 * kWordSize)); | 1273 __ lw(T0, Address(SP, 0 * kWordSize)); |
1274 __ andi(CMPRES, T0, Immediate(kSmiTagMask)); | 1274 __ andi(CMPRES, T0, Immediate(kSmiTagMask)); |
1275 __ bne(CMPRES, ZR, &fall_through); | 1275 __ bne(CMPRES, ZR, &fall_through); |
1276 | 1276 |
1277 // Is Smi. | 1277 // Is Smi. |
1278 __ SmiUntag(T0); | 1278 __ SmiUntag(T0); |
1279 __ mtc1(T0, F4); | 1279 __ mtc1(T0, F4); |
1280 __ cvtdw(D1, F4); | 1280 __ cvtdw(D1, F4); |
1281 | 1281 |
(...skipping 31 matching lines...)
1313 __ Ret(); | 1313 __ Ret(); |
1314 __ delay_slot()->swc1(F1, | 1314 __ delay_slot()->swc1(F1, |
1315 FieldAddress(V0, Double::value_offset() + kWordSize)); | 1315 FieldAddress(V0, Double::value_offset() + kWordSize)); |
1316 __ Bind(&fall_through); | 1316 __ Bind(&fall_through); |
1317 return false; | 1317 return false; |
1318 } | 1318 } |
1319 | 1319 |
1320 | 1320 |
1321 bool Intrinsifier::Double_getIsNaN(Assembler* assembler) { | 1321 bool Intrinsifier::Double_getIsNaN(Assembler* assembler) { |
1322 Label is_true; | 1322 Label is_true; |
1323 __ Untested("Intrinsifier::Double_getIsNaN"); | 1323 |
1324 __ lw(T0, Address(SP, 0 * kWordSize)); | 1324 __ lw(T0, Address(SP, 0 * kWordSize)); |
1325 __ lwc1(F0, FieldAddress(T0, Double::value_offset())); | 1325 __ lwc1(F0, FieldAddress(T0, Double::value_offset())); |
1326 __ lwc1(F1, FieldAddress(T0, Double::value_offset() + kWordSize)); | 1326 __ lwc1(F1, FieldAddress(T0, Double::value_offset() + kWordSize)); |
1327 __ cund(D0, D0); // Check for NaN. | 1327 __ cund(D0, D0); // Check for NaN. |
1328 __ bc1t(&is_true); | 1328 __ bc1t(&is_true); |
1329 __ LoadObject(V0, Bool::False()); // Return false if either is NaN. | 1329 __ LoadObject(V0, Bool::False()); // Return false if either is NaN. |
1330 __ Ret(); | 1330 __ Ret(); |
1331 __ Bind(&is_true); | 1331 __ Bind(&is_true); |
1332 __ LoadObject(V0, Bool::True()); | 1332 __ LoadObject(V0, Bool::True()); |
1333 __ Ret(); | 1333 __ Ret(); |
1334 return true; | 1334 return true; |
1335 } | 1335 } |
1336 | 1336 |
1337 | 1337 |
1338 bool Intrinsifier::Double_getIsNegative(Assembler* assembler) { | 1338 bool Intrinsifier::Double_getIsNegative(Assembler* assembler) { |
1339 Label is_false, is_true, is_zero; | 1339 Label is_false, is_true, is_zero; |
1340 __ Untested("Intrinsifier::Double_getIsNegative"); | |
1341 __ lw(T0, Address(SP, 0 * kWordSize)); | 1340 __ lw(T0, Address(SP, 0 * kWordSize)); |
1342 __ lwc1(F0, FieldAddress(T0, Double::value_offset())); | 1341 __ lwc1(F0, FieldAddress(T0, Double::value_offset())); |
1343 __ lwc1(F1, FieldAddress(T0, Double::value_offset() + kWordSize)); | 1342 __ lwc1(F1, FieldAddress(T0, Double::value_offset() + kWordSize)); |
1344 | 1343 |
1345 __ cund(D0, D0); | 1344 __ cund(D0, D0); |
1346 __ bc1t(&is_false); // NaN -> false. | 1345 __ bc1t(&is_false); // NaN -> false. |
1347 | 1346 |
1348 __ ceqd(D0, D1); | 1347 __ ceqd(D0, D1); |
1349 __ bc1t(&is_zero); // Check for negative zero. | 1348 __ bc1t(&is_zero); // Check for negative zero. |
1350 | 1349 |
(...skipping 32 matching lines...)
1383 __ BranchSignedLess(V0, 0xC0000000, &fall_through); | 1382 __ BranchSignedLess(V0, 0xC0000000, &fall_through); |
1384 __ Ret(); | 1383 __ Ret(); |
1385 __ delay_slot()->SmiTag(V0); | 1384 __ delay_slot()->SmiTag(V0); |
1386 __ Bind(&fall_through); | 1385 __ Bind(&fall_through); |
1387 return false; | 1386 return false; |
1388 } | 1387 } |
1389 | 1388 |
1390 | 1389 |
1391 bool Intrinsifier::Math_sqrt(Assembler* assembler) { | 1390 bool Intrinsifier::Math_sqrt(Assembler* assembler) { |
1392 Label fall_through, is_smi, double_op; | 1391 Label fall_through, is_smi, double_op; |
1393 __ Untested("Intrinsifier::Math_sqrt"); | |
1394 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); | 1392 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); |
1395 // Argument is double and is in T0. | 1393 // Argument is double and is in T0. |
1396 __ lwc1(F0, FieldAddress(T0, Double::value_offset())); | 1394 __ LoadDFromOffset(D1, T0, Double::value_offset() - kHeapObjectTag); |
1397 __ lwc1(F1, FieldAddress(T0, Double::value_offset() + kWordSize)); | |
1398 __ Bind(&double_op); | 1395 __ Bind(&double_op); |
1399 __ sqrtd(D0, D1); | 1396 __ sqrtd(D0, D1); |
1400 const Class& double_class = Class::Handle( | 1397 const Class& double_class = Class::Handle( |
1401 Isolate::Current()->object_store()->double_class()); | 1398 Isolate::Current()->object_store()->double_class()); |
1402 __ TryAllocate(double_class, &fall_through, V0); // Result register. | 1399 __ TryAllocate(double_class, &fall_through, V0); // Result register. |
1403 __ swc1(F0, FieldAddress(V0, Double::value_offset())); | 1400 __ swc1(F0, FieldAddress(V0, Double::value_offset())); |
1404 __ Ret(); | 1401 __ Ret(); |
1405 __ delay_slot()->swc1(F1, | 1402 __ delay_slot()->swc1(F1, |
1406 FieldAddress(V0, Double::value_offset() + kWordSize)); | 1403 FieldAddress(V0, Double::value_offset() + kWordSize)); |
1407 | 1404 |
(...skipping 32 matching lines...)
1440 const Field& random_A_field = Field::ZoneHandle( | 1437 const Field& random_A_field = Field::ZoneHandle( |
1441 random_class.LookupStaticField(Symbols::_A())); | 1438 random_class.LookupStaticField(Symbols::_A())); |
1442 ASSERT(!random_A_field.IsNull()); | 1439 ASSERT(!random_A_field.IsNull()); |
1443 ASSERT(random_A_field.is_const()); | 1440 ASSERT(random_A_field.is_const()); |
1444 const Instance& a_value = Instance::Handle(random_A_field.value()); | 1441 const Instance& a_value = Instance::Handle(random_A_field.value()); |
1445 const int64_t a_int_value = Integer::Cast(a_value).AsInt64Value(); | 1442 const int64_t a_int_value = Integer::Cast(a_value).AsInt64Value(); |
1446 // 'a_int_value' is a mask. | 1443 // 'a_int_value' is a mask. |
1447 ASSERT(Utils::IsUint(32, a_int_value)); | 1444 ASSERT(Utils::IsUint(32, a_int_value)); |
1448 int32_t a_int32_value = static_cast<int32_t>(a_int_value); | 1445 int32_t a_int32_value = static_cast<int32_t>(a_int_value); |
1449 | 1446 |
1450 __ Untested("Random_nextState"); | |
1451 | |
1452 __ lw(T0, Address(SP, 0 * kWordSize)); // Receiver. | 1447 __ lw(T0, Address(SP, 0 * kWordSize)); // Receiver. |
1453 __ lw(T1, FieldAddress(T0, state_field.Offset())); // Field '_state'. | 1448 __ lw(T1, FieldAddress(T0, state_field.Offset())); // Field '_state'. |
1454 | 1449 |
1455 // Addresses of _state[0] and _state[1]. | 1450 // Addresses of _state[0] and _state[1]. |
1456 const int64_t disp_0 = | 1451 const int64_t disp_0 = |
1457 FlowGraphCompiler::DataOffsetFor(kTypedDataUint32ArrayCid); | 1452 FlowGraphCompiler::DataOffsetFor(kTypedDataUint32ArrayCid); |
1458 | 1453 |
1459 const int64_t disp_1 = | 1454 const int64_t disp_1 = |
1460 FlowGraphCompiler::ElementSizeFor(kTypedDataUint32ArrayCid) + | 1455 FlowGraphCompiler::ElementSizeFor(kTypedDataUint32ArrayCid) + |
1461 FlowGraphCompiler::DataOffsetFor(kTypedDataUint32ArrayCid); | 1456 FlowGraphCompiler::DataOffsetFor(kTypedDataUint32ArrayCid); |
(...skipping 310 matching lines...)
1772 __ Bind(&ok); | 1767 __ Bind(&ok); |
1773 __ Ret(); | 1768 __ Ret(); |
1774 | 1769 |
1775 __ Bind(&fall_through); | 1770 __ Bind(&fall_through); |
1776 return false; | 1771 return false; |
1777 } | 1772 } |
1778 | 1773 |
1779 } // namespace dart | 1774 } // namespace dart |
1780 | 1775 |
1781 #endif // defined TARGET_ARCH_MIPS | 1776 #endif // defined TARGET_ARCH_MIPS |