Chromium Code Reviews

Side by Side Diff: runtime/vm/intermediate_language_arm.cc

Issue 408373002: Adds intrinsics for Float64Array [] and []=. (Closed) Base URL: http://dart.googlecode.com/svn/branches/bleeding_edge/dart/
Patch Set: Created 6 years, 5 months ago
OLD | NEW
1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file 1 // Copyright (c) 2013, the Dart project authors. Please see the AUTHORS file
2 // for details. All rights reserved. Use of this source code is governed by a 2 // for details. All rights reserved. Use of this source code is governed by a
3 // BSD-style license that can be found in the LICENSE file. 3 // BSD-style license that can be found in the LICENSE file.
4 4
5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM. 5 #include "vm/globals.h" // Needed here to get TARGET_ARCH_ARM.
6 #if defined(TARGET_ARCH_ARM) 6 #if defined(TARGET_ARCH_ARM)
7 7
8 #include "vm/intermediate_language.h" 8 #include "vm/intermediate_language.h"
9 9
10 #include "vm/cpu.h" 10 #include "vm/cpu.h"
(...skipping 1122 matching lines...)
1133 return kUnboxedFloat32x4; 1133 return kUnboxedFloat32x4;
1134 case kTypedDataFloat64x2ArrayCid: 1134 case kTypedDataFloat64x2ArrayCid:
1135 return kUnboxedFloat64x2; 1135 return kUnboxedFloat64x2;
1136 default: 1136 default:
1137 UNREACHABLE(); 1137 UNREACHABLE();
1138 return kTagged; 1138 return kTagged;
1139 } 1139 }
1140 } 1140 }
1141 1141
1142 1142
1143 static bool CanHoldImmediateOffset(bool is_load, intptr_t cid, int64_t offset) {
1144 int32_t offset_mask = 0;
1145 if (is_load) {
1146 return Address::CanHoldLoadOffset(Address::OperandSizeFor(cid),
1147 offset,
1148 &offset_mask);
1149 } else {
1150 return Address::CanHoldStoreOffset(Address::OperandSizeFor(cid),
1151 offset,
1152 &offset_mask);
1153 }
1154 }
1155
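
The helper above (replaced in this patch by Address::CanHoldImmediateOffset) only forwards to the assembler's load/store range checks; whether a literal offset can be encoded depends on which ARM instruction the operand size selects. Below is a rough, standalone sketch of the ARMv7 immediate ranges involved. The enum and function names are invented for illustration only; the VM's real checks also compute an offset mask that is used later to split out-of-range offsets.

// Rough sketch only: approximate ARMv7 immediate-offset ranges per access
// kind, for non-negative offsets. Names are invented for illustration; this
// is not the VM's Address API.
#include <cstdint>

enum class AccessKind {
  kWordOrByte,          // LDR/STR, LDRB/STRB
  kHalfwordOrWordPair,  // LDRH/STRH, LDRSB/LDRSH, LDRD/STRD
  kVfpSingleOrDouble    // VLDR/VSTR
};

static bool FitsArmImmediateOffset(AccessKind kind, int64_t offset) {
  if (offset < 0) return false;  // Keep the sketch to the unsigned case.
  switch (kind) {
    case AccessKind::kWordOrByte:
      return offset <= 4095;                            // 12-bit immediate.
    case AccessKind::kHalfwordOrWordPair:
      return offset <= 255;                             // 8-bit immediate.
    case AccessKind::kVfpSingleOrDouble:
      return (offset <= 1020) && ((offset & 3) == 0);   // imm8 scaled by 4.
  }
  return false;
}
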
1156 static bool CanBeImmediateIndex(Value* value, 1143 static bool CanBeImmediateIndex(Value* value,
1157 intptr_t cid, 1144 intptr_t cid,
1158 bool is_external, 1145 bool is_external,
1159 bool is_load, 1146 bool is_load,
1160 bool* needs_base) { 1147 bool* needs_base) {
1161 if ((cid == kTypedDataInt32x4ArrayCid) || 1148 if ((cid == kTypedDataInt32x4ArrayCid) ||
1162 (cid == kTypedDataFloat32x4ArrayCid) || 1149 (cid == kTypedDataFloat32x4ArrayCid) ||
1163 (cid == kTypedDataFloat64x2ArrayCid)) { 1150 (cid == kTypedDataFloat64x2ArrayCid)) {
1164 // We are using vldmd/vstmd, which do not support an offset. 1151 // We are using vldmd/vstmd, which do not support an offset.
1165 return false; 1152 return false;
1166 } 1153 }
1167 1154
1168 ConstantInstr* constant = value->definition()->AsConstant(); 1155 ConstantInstr* constant = value->definition()->AsConstant();
1169 if ((constant == NULL) || !Assembler::IsSafeSmi(constant->value())) { 1156 if ((constant == NULL) || !Assembler::IsSafeSmi(constant->value())) {
1170 return false; 1157 return false;
1171 } 1158 }
1172 const int64_t index = Smi::Cast(constant->value()).AsInt64Value(); 1159 const int64_t index = Smi::Cast(constant->value()).AsInt64Value();
1173 const intptr_t scale = Instance::ElementSizeFor(cid); 1160 const intptr_t scale = Instance::ElementSizeFor(cid);
1174 const intptr_t base_offset = 1161 const intptr_t base_offset =
1175 (is_external ? 0 : (Instance::DataOffsetFor(cid) - kHeapObjectTag)); 1162 (is_external ? 0 : (Instance::DataOffsetFor(cid) - kHeapObjectTag));
1176 const int64_t offset = index * scale + base_offset; 1163 const int64_t offset = index * scale + base_offset;
1177 if (!Utils::IsAbsoluteUint(12, offset)) { 1164 if (!Utils::IsAbsoluteUint(12, offset)) {
1178 return false; 1165 return false;
1179 } 1166 }
1180 if (CanHoldImmediateOffset(is_load, cid, offset)) { 1167 if (Address::CanHoldImmediateOffset(is_load, cid, offset)) {
1181 *needs_base = false; 1168 *needs_base = false;
1182 return true; 1169 return true;
1183 } 1170 }
1184 1171
1185 if (CanHoldImmediateOffset(is_load, cid, offset - base_offset)) { 1172 if (Address::CanHoldImmediateOffset(is_load, cid, offset - base_offset)) {
1186 *needs_base = true; 1173 *needs_base = true;
1187 return true; 1174 return true;
1188 } 1175 }
1189 1176
1190 return false; 1177 return false;
1191 } 1178 }
1192 1179
1193 1180
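
A worked example of the checks in CanBeImmediateIndex, assuming a non-external Float64 typed-data array whose payload starts 16 bytes into the heap object (that offset is hypothetical; the real value comes from Instance::DataOffsetFor), and assuming the element is accessed with a VLDR/VSTR-style immediate, which must be a multiple of 4 no larger than 1020:

// Hypothetical walk-through, not VM code. The constants are assumptions
// chosen for illustration only.
#include <cstdint>

int main() {
  const int64_t scale = 8;             // Element size of a Float64 element.
  const int64_t base_offset = 16 - 1;  // Assumed DataOffsetFor(cid) - kHeapObjectTag.
  const int64_t index = 100;
  const int64_t offset = index * scale + base_offset;  // 815, fits in 12 bits.
  // 815 is not a multiple of 4, so a VLDR/VSTR-style immediate cannot encode
  // it directly; offset - base_offset == 800 can, once the untagged data base
  // has been added into a register first. That is the *needs_base = true path.
  const bool fits_directly = (offset <= 1020) && ((offset & 3) == 0);             // false
  const bool fits_with_base =
      ((offset - base_offset) <= 1020) && (((offset - base_offset) & 3) == 0);    // true
  return (!fits_directly && fits_with_base) ? 0 : 1;
}

If neither check passes, CanBeImmediateIndex returns false and the index is kept in a register instead of being folded into the address as a constant.
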
1194 LocationSummary* LoadIndexedInstr::MakeLocationSummary(Isolate* isolate, 1181 LocationSummary* LoadIndexedInstr::MakeLocationSummary(Isolate* isolate,
1195 bool opt) const { 1182 bool opt) const {
(...skipping 27 matching lines...)
1223 locs->set_out(0, Location::Pair(Location::RequiresRegister(), 1210 locs->set_out(0, Location::Pair(Location::RequiresRegister(),
1224 Location::RequiresRegister())); 1211 Location::RequiresRegister()));
1225 } else { 1212 } else {
1226 ASSERT(representation() == kTagged); 1213 ASSERT(representation() == kTagged);
1227 locs->set_out(0, Location::RequiresRegister()); 1214 locs->set_out(0, Location::RequiresRegister());
1228 } 1215 }
1229 return locs; 1216 return locs;
1230 } 1217 }
1231 1218
1232 1219
1233 static Address ElementAddressForIntIndex(Assembler* assembler,
1234 bool is_load,
1235 bool is_external,
1236 intptr_t cid,
1237 intptr_t index_scale,
1238 Register array,
1239 intptr_t index,
1240 Register temp) {
1241 const int64_t offset_base =
1242 (is_external ? 0 : (Instance::DataOffsetFor(cid) - kHeapObjectTag));
1243 const int64_t offset = offset_base +
1244 static_cast<int64_t>(index) * index_scale;
1245 ASSERT(Utils::IsInt(32, offset));
1246
1247 if (CanHoldImmediateOffset(is_load, cid, offset)) {
1248 return Address(array, static_cast<int32_t>(offset));
1249 } else {
1250 ASSERT(CanHoldImmediateOffset(is_load, cid, offset - offset_base));
1251 assembler->AddImmediate(
1252 temp, array, static_cast<int32_t>(offset_base));
1253 return Address(temp, static_cast<int32_t>(offset - offset_base));
1254 }
1255 }
1256
1257
1258 static Address ElementAddressForRegIndex(Assembler* assembler,
1259 bool is_load,
1260 bool is_external,
1261 intptr_t cid,
1262 intptr_t index_scale,
1263 Register array,
1264 Register index) {
1265 // Note that the index is expected to be smi-tagged (i.e., LSL 1) for all arrays.
1266 const intptr_t shift = Utils::ShiftForPowerOfTwo(index_scale) - kSmiTagShift;
1267 int32_t offset =
1268 is_external ? 0 : (Instance::DataOffsetFor(cid) - kHeapObjectTag);
1269 const OperandSize size = Address::OperandSizeFor(cid);
1270 ASSERT(array != IP);
1271 ASSERT(index != IP);
1272 const Register base = is_load ? IP : index;
1273 if ((offset != 0) ||
1274 (size == kSWord) || (size == kDWord) || (size == kRegList)) {
1275 if (shift < 0) {
1276 ASSERT(shift == -1);
1277 assembler->add(base, array, Operand(index, ASR, 1));
1278 } else {
1279 assembler->add(base, array, Operand(index, LSL, shift));
1280 }
1281 } else {
1282 if (shift < 0) {
1283 ASSERT(shift == -1);
1284 return Address(array, index, ASR, 1);
1285 } else {
1286 return Address(array, index, LSL, shift);
1287 }
1288 }
1289 int32_t offset_mask = 0;
1290 if ((is_load && !Address::CanHoldLoadOffset(size,
1291 offset,
1292 &offset_mask)) ||
1293 (!is_load && !Address::CanHoldStoreOffset(size,
1294 offset,
1295 &offset_mask))) {
1296 assembler->AddImmediate(base, offset & ~offset_mask);
1297 offset = offset & offset_mask;
1298 }
1299 return Address(base, offset);
1300 }
1301
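
For the register-index path above, the shift arithmetic works out as follows, assuming the standard Dart Smi encoding on 32-bit targets (kSmiTagShift == 1, so a Smi i is stored as i << 1): for a Float64 element, index_scale is 8, so shift = 3 - 1 = 2 and LSL 2 turns the tagged index i*2 into the byte offset i*8; for a 1-byte element, shift = -1 and ASR 1 simply untags the index. A minimal standalone sketch (the helper name is invented):

// Minimal sketch of the tagged-index arithmetic, assuming Smi values are
// stored shifted left by one (32-bit Dart Smi tagging). Not VM code.
#include <cassert>
#include <cstdint>

static int32_t ByteOffsetFromTaggedIndex(int32_t tagged_index, int32_t index_scale) {
  const int kSmiTagShift = 1;  // Smi i is encoded as i << 1.
  int shift = -kSmiTagShift;
  while ((1 << (shift + kSmiTagShift + 1)) <= index_scale) {
    ++shift;  // shift ends up as log2(index_scale) - kSmiTagShift.
  }
  // shift is -1 only for 1-byte elements; ASR 1 then just untags the index.
  return (shift < 0) ? (tagged_index >> 1) : (tagged_index << shift);
}

int main() {
  assert(ByteOffsetFromTaggedIndex(5 << 1, 8) == 40);  // Float64: value 5 -> offset 40.
  assert(ByteOffsetFromTaggedIndex(5 << 1, 1) == 5);   // Bytes:   value 5 -> offset 5.
  return 0;
}
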
1302
1303 void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 1220 void LoadIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
1304 // The array register points to the backing store for external arrays. 1221 // The array register points to the backing store for external arrays.
1305 const Register array = locs()->in(0).reg(); 1222 const Register array = locs()->in(0).reg();
1306 const Location index = locs()->in(1); 1223 const Location index = locs()->in(1);
1307 1224
1308 Address element_address = index.IsRegister() 1225 Address element_address = index.IsRegister()
1309 ? ElementAddressForRegIndex(compiler->assembler(), 1226 ? __ ElementAddressForRegIndex(true, // Load.
1310 true, // Load. 1227 IsExternal(), class_id(), index_scale(),
1311 IsExternal(), class_id(), index_scale(), 1228 array,
1312 array, index.reg()) 1229 index.reg())
1313 : ElementAddressForIntIndex(compiler->assembler(), 1230 : __ ElementAddressForIntIndex(true, // Load.
1314 true, // Load. 1231 IsExternal(), class_id(), index_scale(),
1315 IsExternal(), class_id(), index_scale(), 1232 array, Smi::Cast(index.constant()).Value(),
1316 array, Smi::Cast(index.constant()).Value(), 1233 IP); // Temp register.
1317 IP); // Temp register.
1318 // Warning: element_address may use register IP as base. 1234 // Warning: element_address may use register IP as base.
1319 1235
1320 if ((representation() == kUnboxedDouble) || 1236 if ((representation() == kUnboxedDouble) ||
1321 (representation() == kUnboxedFloat32x4) || 1237 (representation() == kUnboxedFloat32x4) ||
1322 (representation() == kUnboxedInt32x4) || 1238 (representation() == kUnboxedInt32x4) ||
1323 (representation() == kUnboxedFloat64x2)) { 1239 (representation() == kUnboxedFloat64x2)) {
1324 const QRegister result = locs()->out(0).fpu_reg(); 1240 const QRegister result = locs()->out(0).fpu_reg();
1325 const DRegister dresult0 = EvenDRegisterOf(result); 1241 const DRegister dresult0 = EvenDRegisterOf(result);
1326 switch (class_id()) { 1242 switch (class_id()) {
1327 case kTypedDataFloat32ArrayCid: 1243 case kTypedDataFloat32ArrayCid:
(...skipping 207 matching lines...)
1535 1451
1536 1452
1537 void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { 1453 void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
1538 // The array register points to the backing store for external arrays. 1454 // The array register points to the backing store for external arrays.
1539 const Register array = locs()->in(0).reg(); 1455 const Register array = locs()->in(0).reg();
1540 const Location index = locs()->in(1); 1456 const Location index = locs()->in(1);
1541 const Register temp = 1457 const Register temp =
1542 (locs()->temp_count() > 0) ? locs()->temp(0).reg() : kNoRegister; 1458 (locs()->temp_count() > 0) ? locs()->temp(0).reg() : kNoRegister;
1543 1459
1544 Address element_address = index.IsRegister() 1460 Address element_address = index.IsRegister()
1545 ? ElementAddressForRegIndex(compiler->assembler(), 1461 ? __ ElementAddressForRegIndex(false, // Store.
1546 false, // Store. 1462 IsExternal(), class_id(), index_scale(),
1547 IsExternal(), class_id(), index_scale(), 1463 array,
1548 array, index.reg()) 1464 index.reg())
1549 : ElementAddressForIntIndex(compiler->assembler(), 1465 : __ ElementAddressForIntIndex(false, // Store.
1550 false, // Store. 1466 IsExternal(), class_id(), index_scale(),
1551 IsExternal(), class_id(), index_scale(), 1467 array, Smi::Cast(index.constant()).Value(),
1552 array, Smi::Cast(index.constant()).Value(), 1468 temp);
1553 temp);
1554 1469
1555 switch (class_id()) { 1470 switch (class_id()) {
1556 case kArrayCid: 1471 case kArrayCid:
1557 if (ShouldEmitStoreBarrier()) { 1472 if (ShouldEmitStoreBarrier()) {
1558 const Register value = locs()->in(2).reg(); 1473 const Register value = locs()->in(2).reg();
1559 __ StoreIntoObject(array, element_address, value); 1474 __ StoreIntoObject(array, element_address, value);
1560 } else if (locs()->in(2).IsConstant()) { 1475 } else if (locs()->in(2).IsConstant()) {
1561 const Object& constant = locs()->in(2).constant(); 1476 const Object& constant = locs()->in(2).constant();
1562 __ StoreIntoObjectNoBarrier(array, element_address, constant); 1477 __ StoreIntoObjectNoBarrier(array, element_address, constant);
1563 } else { 1478 } else {
(...skipping 5325 matching lines...)
6889 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs()); 6804 compiler->GenerateCall(token_pos(), &label, stub_kind_, locs());
6890 #if defined(DEBUG) 6805 #if defined(DEBUG)
6891 __ LoadImmediate(R4, kInvalidObjectPointer); 6806 __ LoadImmediate(R4, kInvalidObjectPointer);
6892 __ LoadImmediate(R5, kInvalidObjectPointer); 6807 __ LoadImmediate(R5, kInvalidObjectPointer);
6893 #endif 6808 #endif
6894 } 6809 }
6895 6810
6896 } // namespace dart 6811 } // namespace dart
6897 6812
6898 #endif // defined TARGET_ARCH_ARM 6813 #endif // defined TARGET_ARCH_ARM