| OLD | NEW |
| 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file | 1 // Copyright (c) 2014, the Dart project authors. Please see the AUTHORS file |
| 2 // for details. All rights reserved. Use of this source code is governed by a | 2 // for details. All rights reserved. Use of this source code is governed by a |
| 3 // BSD-style license that can be found in the LICENSE file. | 3 // BSD-style license that can be found in the LICENSE file. |
| 4 | 4 |
| 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. | 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM64. |
| 6 #if defined(TARGET_ARCH_ARM64) | 6 #if defined(TARGET_ARCH_ARM64) |
| 7 | 7 |
| 8 #include "vm/intrinsifier.h" | 8 #include "vm/intrinsifier.h" |
| 9 | 9 |
| 10 #include "vm/assembler.h" | 10 #include "vm/assembler.h" |
| (...skipping 409 matching lines...) |
| 420 R1, // Array. | 420 R1, // Array. |
| 421 R0); // Index. | 421 R0); // Index. |
| 422 | 422 |
| 423 __ fldrd(V0, element_address); | 423 __ fldrd(V0, element_address); |
| 424 | 424 |
| 425 const Class& double_class = Class::Handle( | 425 const Class& double_class = Class::Handle( |
| 426 Isolate::Current()->object_store()->double_class()); | 426 Isolate::Current()->object_store()->double_class()); |
| 427 __ TryAllocate(double_class, | 427 __ TryAllocate(double_class, |
| 428 &fall_through, | 428 &fall_through, |
| 429 R0, // Result register. | 429 R0, // Result register. |
| | 430 R1, // Temp register. |
| 430 kNoPP); | 431 kNoPP); |
| 431 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); | 432 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); |
| 432 __ ret(); | 433 __ ret(); |
| 433 __ Bind(&fall_through); | 434 __ Bind(&fall_through); |
| 434 } | 435 } |
| 435 | 436 |
| 436 | 437 |
| 437 void Intrinsifier::Float64Array_setIndexed(Assembler* assembler) { | 438 void Intrinsifier::Float64Array_setIndexed(Assembler* assembler) { |
| 438 Label fall_through; | 439 Label fall_through; |
| 439 __ ldr(R0, Address(SP, + 1 * kWordSize)); // Index. | 440 __ ldr(R0, Address(SP, + 1 * kWordSize)); // Index. |
| (...skipping 627 matching lines...) |
| 1067 __ LoadDFieldFromOffset(V0, R0, Double::value_offset(), kNoPP); | 1068 __ LoadDFieldFromOffset(V0, R0, Double::value_offset(), kNoPP); |
| 1068 switch (kind) { | 1069 switch (kind) { |
| 1069 case Token::kADD: __ faddd(V0, V0, V1); break; | 1070 case Token::kADD: __ faddd(V0, V0, V1); break; |
| 1070 case Token::kSUB: __ fsubd(V0, V0, V1); break; | 1071 case Token::kSUB: __ fsubd(V0, V0, V1); break; |
| 1071 case Token::kMUL: __ fmuld(V0, V0, V1); break; | 1072 case Token::kMUL: __ fmuld(V0, V0, V1); break; |
| 1072 case Token::kDIV: __ fdivd(V0, V0, V1); break; | 1073 case Token::kDIV: __ fdivd(V0, V0, V1); break; |
| 1073 default: UNREACHABLE(); | 1074 default: UNREACHABLE(); |
| 1074 } | 1075 } |
| 1075 const Class& double_class = Class::Handle( | 1076 const Class& double_class = Class::Handle( |
| 1076 Isolate::Current()->object_store()->double_class()); | 1077 Isolate::Current()->object_store()->double_class()); |
| 1077 __ TryAllocate(double_class, &fall_through, R0, kNoPP); | 1078 __ TryAllocate(double_class, &fall_through, R0, R1, kNoPP); |
| 1078 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); | 1079 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); |
| 1079 __ ret(); | 1080 __ ret(); |
| 1080 __ Bind(&fall_through); | 1081 __ Bind(&fall_through); |
| 1081 } | 1082 } |
| 1082 | 1083 |
| 1083 | 1084 |
| 1084 void Intrinsifier::Double_add(Assembler* assembler) { | 1085 void Intrinsifier::Double_add(Assembler* assembler) { |
| 1085 DoubleArithmeticOperations(assembler, Token::kADD); | 1086 DoubleArithmeticOperations(assembler, Token::kADD); |
| 1086 } | 1087 } |
| 1087 | 1088 |
| (...skipping 21 matching lines...) |
| 1109 __ tsti(R0, kSmiTagMask); | 1110 __ tsti(R0, kSmiTagMask); |
| 1110 __ b(&fall_through, NE); | 1111 __ b(&fall_through, NE); |
| 1111 // Is Smi. | 1112 // Is Smi. |
| 1112 __ SmiUntag(R0); | 1113 __ SmiUntag(R0); |
| 1113 __ scvtfd(V1, R0); | 1114 __ scvtfd(V1, R0); |
| 1114 __ ldr(R0, Address(SP, 1 * kWordSize)); | 1115 __ ldr(R0, Address(SP, 1 * kWordSize)); |
| 1115 __ LoadDFieldFromOffset(V0, R0, Double::value_offset(), kNoPP); | 1116 __ LoadDFieldFromOffset(V0, R0, Double::value_offset(), kNoPP); |
| 1116 __ fmuld(V0, V0, V1); | 1117 __ fmuld(V0, V0, V1); |
| 1117 const Class& double_class = Class::Handle( | 1118 const Class& double_class = Class::Handle( |
| 1118 Isolate::Current()->object_store()->double_class()); | 1119 Isolate::Current()->object_store()->double_class()); |
| 1119 __ TryAllocate(double_class, &fall_through, R0, kNoPP); | 1120 __ TryAllocate(double_class, &fall_through, R0, R1, kNoPP); |
| 1120 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); | 1121 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); |
| 1121 __ ret(); | 1122 __ ret(); |
| 1122 __ Bind(&fall_through); | 1123 __ Bind(&fall_through); |
| 1123 } | 1124 } |
| 1124 | 1125 |
| 1125 | 1126 |
| 1126 void Intrinsifier::Double_fromInteger(Assembler* assembler) { | 1127 void Intrinsifier::Double_fromInteger(Assembler* assembler) { |
| 1127 Label fall_through; | 1128 Label fall_through; |
| 1128 | 1129 |
| 1129 __ ldr(R0, Address(SP, 0 * kWordSize)); | 1130 __ ldr(R0, Address(SP, 0 * kWordSize)); |
| 1130 __ tsti(R0, kSmiTagMask); | 1131 __ tsti(R0, kSmiTagMask); |
| 1131 __ b(&fall_through, NE); | 1132 __ b(&fall_through, NE); |
| 1132 // Is Smi. | 1133 // Is Smi. |
| 1133 __ SmiUntag(R0); | 1134 __ SmiUntag(R0); |
| 1134 __ scvtfd(V0, R0); | 1135 __ scvtfd(V0, R0); |
| 1135 const Class& double_class = Class::Handle( | 1136 const Class& double_class = Class::Handle( |
| 1136 Isolate::Current()->object_store()->double_class()); | 1137 Isolate::Current()->object_store()->double_class()); |
| 1137 __ TryAllocate(double_class, &fall_through, R0, kNoPP); | 1138 __ TryAllocate(double_class, &fall_through, R0, R1, kNoPP); |
| 1138 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); | 1139 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); |
| 1139 __ ret(); | 1140 __ ret(); |
| 1140 __ Bind(&fall_through); | 1141 __ Bind(&fall_through); |
| 1141 } | 1142 } |
| 1142 | 1143 |
| 1143 | 1144 |
| 1144 void Intrinsifier::Double_getIsNaN(Assembler* assembler) { | 1145 void Intrinsifier::Double_getIsNaN(Assembler* assembler) { |
| 1145 Label is_true; | 1146 Label is_true; |
| 1146 __ ldr(R0, Address(SP, 0 * kWordSize)); | 1147 __ ldr(R0, Address(SP, 0 * kWordSize)); |
| 1147 __ LoadDFieldFromOffset(V0, R0, Double::value_offset(), kNoPP); | 1148 __ LoadDFieldFromOffset(V0, R0, Double::value_offset(), kNoPP); |
| (...skipping 59 matching lines...) |
| 1207 | 1208 |
| 1208 void Intrinsifier::Math_sqrt(Assembler* assembler) { | 1209 void Intrinsifier::Math_sqrt(Assembler* assembler) { |
| 1209 Label fall_through, is_smi, double_op; | 1210 Label fall_through, is_smi, double_op; |
| 1210 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); | 1211 TestLastArgumentIsDouble(assembler, &is_smi, &fall_through); |
| 1211 // Argument is double and is in R0. | 1212 // Argument is double and is in R0. |
| 1212 __ LoadDFieldFromOffset(V1, R0, Double::value_offset(), kNoPP); | 1213 __ LoadDFieldFromOffset(V1, R0, Double::value_offset(), kNoPP); |
| 1213 __ Bind(&double_op); | 1214 __ Bind(&double_op); |
| 1214 __ fsqrtd(V0, V1); | 1215 __ fsqrtd(V0, V1); |
| 1215 const Class& double_class = Class::Handle( | 1216 const Class& double_class = Class::Handle( |
| 1216 Isolate::Current()->object_store()->double_class()); | 1217 Isolate::Current()->object_store()->double_class()); |
| 1217 __ TryAllocate(double_class, &fall_through, R0, kNoPP); | 1218 __ TryAllocate(double_class, &fall_through, R0, R1, kNoPP); |
| 1218 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); | 1219 __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP); |
| 1219 __ ret(); | 1220 __ ret(); |
| 1220 __ Bind(&is_smi); | 1221 __ Bind(&is_smi); |
| 1221 __ SmiUntag(R0); | 1222 __ SmiUntag(R0); |
| 1222 __ scvtfd(V1, R0); | 1223 __ scvtfd(V1, R0); |
| 1223 __ b(&double_op); | 1224 __ b(&double_op); |
| 1224 __ Bind(&fall_through); | 1225 __ Bind(&fall_through); |
| 1225 } | 1226 } |
| 1226 | 1227 |
| 1227 | 1228 |
| (...skipping 438 matching lines...) |
| 1666 Isolate* isolate = Isolate::Current(); | 1667 Isolate* isolate = Isolate::Current(); |
| 1667 __ LoadImmediate(R1, reinterpret_cast<uword>(isolate), kNoPP); | 1668 __ LoadImmediate(R1, reinterpret_cast<uword>(isolate), kNoPP); |
| 1668 // Set return value to Isolate::current_tag_. | 1669 // Set return value to Isolate::current_tag_. |
| 1669 __ ldr(R0, Address(R1, Isolate::current_tag_offset())); | 1670 __ ldr(R0, Address(R1, Isolate::current_tag_offset())); |
| 1670 __ ret(); | 1671 __ ret(); |
| 1671 } | 1672 } |
| 1672 | 1673 |
| 1673 } // namespace dart | 1674 } // namespace dart |
| 1674 | 1675 |
| 1675 #endif // defined TARGET_ARCH_ARM64 | 1676 #endif // defined TARGET_ARCH_ARM64 |
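
The change repeated in every hunk above is that `Assembler::TryAllocate` now takes an explicit temporary register (R1 here) in addition to the result register (R0). Each double intrinsic then ends with the same box-and-return tail. Below is a minimal sketch of that tail, assuming the `#define __ assembler->` shorthand used by the intrinsifier files; the helper name `BoxDoubleAndReturn` is hypothetical and not part of the source.

```cpp
// Hypothetical helper illustrating the tail shared by the double intrinsics
// in this diff; the name is illustrative only.  The value to box is in V0.
static void BoxDoubleAndReturn(Assembler* assembler, Label* fall_through) {
  const Class& double_class = Class::Handle(
      Isolate::Current()->object_store()->double_class());
  // TryAllocate now needs a temp register (R1) alongside the result
  // register (R0); on allocation failure it branches to fall_through.
  __ TryAllocate(double_class, fall_through, R0, R1, kNoPP);
  __ StoreDFieldToOffset(V0, R0, Double::value_offset(), kNoPP);
  __ ret();
}
```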