Chromium Code Reviews

Side by Side Diff: src/compiler/arm/code-generator-arm.cc

Issue 2176173003: [Turbofan] Revert FP register aliasing support on Arm. (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Back out float register slots for Arm. Created 4 years, 4 months ago
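
For context, this patch set reintroduces dedicated Float32 helpers in ArmOperandConverter (InputFloat32Register, OutputFloat32Register, ToFloat32Register in the first hunk below). They rely on the VFP aliasing rule that double register d<n> overlaps single-precision registers s<2n> and s<2n+1>, so the low half of an allocated double register holds the float value. A minimal sketch of that mapping follows; the helper name is hypothetical, while the real code goes through LowDwVfpRegister::from_code(...).low():

// Illustrative only: maps a VFP double-register code to the single-precision
// register code that aliases its low half (d<n> overlaps s<2n> and s<2n+1>,
// valid for d0-d15).
int LowSingleCodeForDouble(int double_code) {  // hypothetical helper name
  return double_code * 2;  // s<2n> is the low half of d<n>
}
// Example: a Float32 operand allocated to d3 is accessed through s6.
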
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/compiler/code-generator.h" 5 #include "src/compiler/code-generator.h"
6 6
7 #include "src/arm/macro-assembler-arm.h" 7 #include "src/arm/macro-assembler-arm.h"
8 #include "src/ast/scopes.h" 8 #include "src/ast/scopes.h"
9 #include "src/compiler/code-generator-impl.h" 9 #include "src/compiler/code-generator-impl.h"
10 #include "src/compiler/gap-resolver.h" 10 #include "src/compiler/gap-resolver.h"
(...skipping 118 matching lines...)
129 MemOperand ToMemOperand(InstructionOperand* op) const { 129 MemOperand ToMemOperand(InstructionOperand* op) const {
130 DCHECK_NOT_NULL(op); 130 DCHECK_NOT_NULL(op);
131 DCHECK(op->IsStackSlot() || op->IsFPStackSlot()); 131 DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
132 return SlotToMemOperand(AllocatedOperand::cast(op)->index()); 132 return SlotToMemOperand(AllocatedOperand::cast(op)->index());
133 } 133 }
134 134
135 MemOperand SlotToMemOperand(int slot) const { 135 MemOperand SlotToMemOperand(int slot) const {
136 FrameOffset offset = frame_access_state()->GetFrameOffset(slot); 136 FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
137 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset()); 137 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
138 } 138 }
139
140 FloatRegister InputFloat32Register(size_t index) {
141 return ToFloat32Register(instr_->InputAt(index));
142 }
143
144 FloatRegister OutputFloat32Register() {
145 return ToFloat32Register(instr_->Output());
146 }
147
148 FloatRegister ToFloat32Register(InstructionOperand* op) {
149 return LowDwVfpRegister::from_code(ToDoubleRegister(op).code()).low();
150 }
139 }; 151 };
140 152
141
142 namespace { 153 namespace {
143 154
144 class OutOfLineLoadFloat final : public OutOfLineCode { 155 class OutOfLineLoadFloat32 final : public OutOfLineCode {
145 public: 156 public:
146 OutOfLineLoadFloat(CodeGenerator* gen, SwVfpRegister result) 157 OutOfLineLoadFloat32(CodeGenerator* gen, SwVfpRegister result)
147 : OutOfLineCode(gen), result_(result) {} 158 : OutOfLineCode(gen), result_(result) {}
148 159
149 void Generate() final { 160 void Generate() final {
150 // Compute sqrtf(-1.0f), which results in a quiet single-precision NaN. 161 // Compute sqrtf(-1.0f), which results in a quiet single-precision NaN.
151 __ vmov(result_, -1.0f); 162 __ vmov(result_, -1.0f);
152 __ vsqrt(result_, result_); 163 __ vsqrt(result_, result_);
153 } 164 }
154 165
155 private: 166 private:
156 SwVfpRegister const result_; 167 SwVfpRegister const result_;
(...skipping 910 matching lines...)
1067 if (instr->InputAt(2)->IsImmediate()) { 1078 if (instr->InputAt(2)->IsImmediate()) {
1068 __ AsrPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0), 1079 __ AsrPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
1069 i.InputRegister(1), i.InputInt32(2)); 1080 i.InputRegister(1), i.InputInt32(2));
1070 } else { 1081 } else {
1071 __ AsrPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0), 1082 __ AsrPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
1072 i.InputRegister(1), kScratchReg, i.InputRegister(2)); 1083 i.InputRegister(1), kScratchReg, i.InputRegister(2));
1073 } 1084 }
1074 break; 1085 break;
1075 case kArmVcmpF32: 1086 case kArmVcmpF32:
1076 if (instr->InputAt(1)->IsFPRegister()) { 1087 if (instr->InputAt(1)->IsFPRegister()) {
1077 __ VFPCompareAndSetFlags(i.InputFloatRegister(0), 1088 __ VFPCompareAndSetFlags(i.InputFloat32Register(0),
1078 i.InputFloatRegister(1)); 1089 i.InputFloat32Register(1));
1079 } else { 1090 } else {
1080 DCHECK(instr->InputAt(1)->IsImmediate()); 1091 DCHECK(instr->InputAt(1)->IsImmediate());
1081 // 0.0 is the only immediate supported by vcmp instructions. 1092 // 0.0 is the only immediate supported by vcmp instructions.
1082 DCHECK(i.InputFloat32(1) == 0.0f); 1093 DCHECK(i.InputFloat32(1) == 0.0f);
1083 __ VFPCompareAndSetFlags(i.InputFloatRegister(0), i.InputFloat32(1)); 1094 __ VFPCompareAndSetFlags(i.InputFloat32Register(0), i.InputFloat32(1));
1084 } 1095 }
1085 DCHECK_EQ(SetCC, i.OutputSBit()); 1096 DCHECK_EQ(SetCC, i.OutputSBit());
1086 break; 1097 break;
1087 case kArmVaddF32: 1098 case kArmVaddF32:
1088 __ vadd(i.OutputFloatRegister(), i.InputFloatRegister(0), 1099 __ vadd(i.OutputFloat32Register(), i.InputFloat32Register(0),
1089 i.InputFloatRegister(1)); 1100 i.InputFloat32Register(1));
1090 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1101 DCHECK_EQ(LeaveCC, i.OutputSBit());
1091 break; 1102 break;
1092 case kArmVsubF32: 1103 case kArmVsubF32:
1093 __ vsub(i.OutputFloatRegister(), i.InputFloatRegister(0), 1104 __ vsub(i.OutputFloat32Register(), i.InputFloat32Register(0),
1094 i.InputFloatRegister(1)); 1105 i.InputFloat32Register(1));
1095 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1106 DCHECK_EQ(LeaveCC, i.OutputSBit());
1096 break; 1107 break;
1097 case kArmVmulF32: 1108 case kArmVmulF32:
1098 __ vmul(i.OutputFloatRegister(), i.InputFloatRegister(0), 1109 __ vmul(i.OutputFloat32Register(), i.InputFloat32Register(0),
1099 i.InputFloatRegister(1)); 1110 i.InputFloat32Register(1));
1100 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1111 DCHECK_EQ(LeaveCC, i.OutputSBit());
1101 break; 1112 break;
1102 case kArmVmlaF32: 1113 case kArmVmlaF32:
1103 __ vmla(i.OutputFloatRegister(), i.InputFloatRegister(1), 1114 __ vmla(i.OutputFloat32Register(), i.InputFloat32Register(1),
1104 i.InputFloatRegister(2)); 1115 i.InputFloat32Register(2));
1105 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1116 DCHECK_EQ(LeaveCC, i.OutputSBit());
1106 break; 1117 break;
1107 case kArmVmlsF32: 1118 case kArmVmlsF32:
1108 __ vmls(i.OutputFloatRegister(), i.InputFloatRegister(1), 1119 __ vmls(i.OutputFloat32Register(), i.InputFloat32Register(1),
1109 i.InputFloatRegister(2)); 1120 i.InputFloat32Register(2));
1110 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1121 DCHECK_EQ(LeaveCC, i.OutputSBit());
1111 break; 1122 break;
1112 case kArmVdivF32: 1123 case kArmVdivF32:
1113 __ vdiv(i.OutputFloatRegister(), i.InputFloatRegister(0), 1124 __ vdiv(i.OutputFloat32Register(), i.InputFloat32Register(0),
1114 i.InputFloatRegister(1)); 1125 i.InputFloat32Register(1));
1115 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1126 DCHECK_EQ(LeaveCC, i.OutputSBit());
1116 break; 1127 break;
1117 case kArmVsqrtF32: 1128 case kArmVsqrtF32:
1118 __ vsqrt(i.OutputFloatRegister(), i.InputFloatRegister(0)); 1129 __ vsqrt(i.OutputFloat32Register(), i.InputFloat32Register(0));
1119 break; 1130 break;
1120 case kArmVabsF32: 1131 case kArmVabsF32:
1121 __ vabs(i.OutputFloatRegister(), i.InputFloatRegister(0)); 1132 __ vabs(i.OutputFloat32Register(), i.InputFloat32Register(0));
1122 break; 1133 break;
1123 case kArmVnegF32: 1134 case kArmVnegF32:
1124 __ vneg(i.OutputFloatRegister(), i.InputFloatRegister(0)); 1135 __ vneg(i.OutputFloat32Register(), i.InputFloat32Register(0));
1125 break; 1136 break;
1126 case kArmVcmpF64: 1137 case kArmVcmpF64:
1127 if (instr->InputAt(1)->IsFPRegister()) { 1138 if (instr->InputAt(1)->IsFPRegister()) {
1128 __ VFPCompareAndSetFlags(i.InputDoubleRegister(0), 1139 __ VFPCompareAndSetFlags(i.InputDoubleRegister(0),
1129 i.InputDoubleRegister(1)); 1140 i.InputDoubleRegister(1));
1130 } else { 1141 } else {
1131 DCHECK(instr->InputAt(1)->IsImmediate()); 1142 DCHECK(instr->InputAt(1)->IsImmediate());
1132 // 0.0 is the only immediate supported by vcmp instructions. 1143 // 0.0 is the only immediate supported by vcmp instructions.
1133 DCHECK(i.InputDouble(1) == 0.0); 1144 DCHECK(i.InputDouble(1) == 0.0);
1134 __ VFPCompareAndSetFlags(i.InputDoubleRegister(0), i.InputDouble(1)); 1145 __ VFPCompareAndSetFlags(i.InputDoubleRegister(0), i.InputDouble(1));
(...skipping 47 matching lines...)
1182 case kArmVsqrtF64: 1193 case kArmVsqrtF64:
1183 __ vsqrt(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); 1194 __ vsqrt(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1184 break; 1195 break;
1185 case kArmVabsF64: 1196 case kArmVabsF64:
1186 __ vabs(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); 1197 __ vabs(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1187 break; 1198 break;
1188 case kArmVnegF64: 1199 case kArmVnegF64:
1189 __ vneg(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); 1200 __ vneg(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1190 break; 1201 break;
1191 case kArmVrintmF32: 1202 case kArmVrintmF32:
1192 __ vrintm(i.OutputFloatRegister(), i.InputFloatRegister(0)); 1203 __ vrintm(i.OutputFloat32Register(), i.InputFloat32Register(0));
1193 break; 1204 break;
1194 case kArmVrintmF64: 1205 case kArmVrintmF64:
1195 __ vrintm(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); 1206 __ vrintm(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1196 break; 1207 break;
1197 case kArmVrintpF32: 1208 case kArmVrintpF32:
1198 __ vrintp(i.OutputFloatRegister(), i.InputFloatRegister(0)); 1209 __ vrintp(i.OutputFloat32Register(), i.InputFloat32Register(0));
1199 break; 1210 break;
1200 case kArmVrintpF64: 1211 case kArmVrintpF64:
1201 __ vrintp(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); 1212 __ vrintp(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1202 break; 1213 break;
1203 case kArmVrintzF32: 1214 case kArmVrintzF32:
1204 __ vrintz(i.OutputFloatRegister(), i.InputFloatRegister(0)); 1215 __ vrintz(i.OutputFloat32Register(), i.InputFloat32Register(0));
1205 break; 1216 break;
1206 case kArmVrintzF64: 1217 case kArmVrintzF64:
1207 __ vrintz(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); 1218 __ vrintz(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1208 break; 1219 break;
1209 case kArmVrintaF64: 1220 case kArmVrintaF64:
1210 __ vrinta(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); 1221 __ vrinta(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1211 break; 1222 break;
1212 case kArmVrintnF32: 1223 case kArmVrintnF32:
1213 __ vrintn(i.OutputFloatRegister(), i.InputFloatRegister(0)); 1224 __ vrintn(i.OutputFloat32Register(), i.InputFloat32Register(0));
1214 break; 1225 break;
1215 case kArmVrintnF64: 1226 case kArmVrintnF64:
1216 __ vrintn(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); 1227 __ vrintn(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1217 break; 1228 break;
1218 case kArmVcvtF32F64: { 1229 case kArmVcvtF32F64: {
1219 __ vcvt_f32_f64(i.OutputFloatRegister(), i.InputDoubleRegister(0)); 1230 __ vcvt_f32_f64(i.OutputFloat32Register(), i.InputDoubleRegister(0));
1220 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1231 DCHECK_EQ(LeaveCC, i.OutputSBit());
1221 break; 1232 break;
1222 } 1233 }
1223 case kArmVcvtF64F32: { 1234 case kArmVcvtF64F32: {
1224 __ vcvt_f64_f32(i.OutputDoubleRegister(), i.InputFloatRegister(0)); 1235 __ vcvt_f64_f32(i.OutputDoubleRegister(), i.InputFloat32Register(0));
1225 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1236 DCHECK_EQ(LeaveCC, i.OutputSBit());
1226 break; 1237 break;
1227 } 1238 }
1228 case kArmVcvtF32S32: { 1239 case kArmVcvtF32S32: {
1229 SwVfpRegister scratch = kScratchDoubleReg.low(); 1240 SwVfpRegister scratch = kScratchDoubleReg.low();
1230 __ vmov(scratch, i.InputRegister(0)); 1241 __ vmov(scratch, i.InputRegister(0));
1231 __ vcvt_f32_s32(i.OutputFloatRegister(), scratch); 1242 __ vcvt_f32_s32(i.OutputFloat32Register(), scratch);
1232 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1243 DCHECK_EQ(LeaveCC, i.OutputSBit());
1233 break; 1244 break;
1234 } 1245 }
1235 case kArmVcvtF32U32: { 1246 case kArmVcvtF32U32: {
1236 SwVfpRegister scratch = kScratchDoubleReg.low(); 1247 SwVfpRegister scratch = kScratchDoubleReg.low();
1237 __ vmov(scratch, i.InputRegister(0)); 1248 __ vmov(scratch, i.InputRegister(0));
1238 __ vcvt_f32_u32(i.OutputFloatRegister(), scratch); 1249 __ vcvt_f32_u32(i.OutputFloat32Register(), scratch);
1239 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1250 DCHECK_EQ(LeaveCC, i.OutputSBit());
1240 break; 1251 break;
1241 } 1252 }
1242 case kArmVcvtF64S32: { 1253 case kArmVcvtF64S32: {
1243 SwVfpRegister scratch = kScratchDoubleReg.low(); 1254 SwVfpRegister scratch = kScratchDoubleReg.low();
1244 __ vmov(scratch, i.InputRegister(0)); 1255 __ vmov(scratch, i.InputRegister(0));
1245 __ vcvt_f64_s32(i.OutputDoubleRegister(), scratch); 1256 __ vcvt_f64_s32(i.OutputDoubleRegister(), scratch);
1246 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1257 DCHECK_EQ(LeaveCC, i.OutputSBit());
1247 break; 1258 break;
1248 } 1259 }
1249 case kArmVcvtF64U32: { 1260 case kArmVcvtF64U32: {
1250 SwVfpRegister scratch = kScratchDoubleReg.low(); 1261 SwVfpRegister scratch = kScratchDoubleReg.low();
1251 __ vmov(scratch, i.InputRegister(0)); 1262 __ vmov(scratch, i.InputRegister(0));
1252 __ vcvt_f64_u32(i.OutputDoubleRegister(), scratch); 1263 __ vcvt_f64_u32(i.OutputDoubleRegister(), scratch);
1253 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1264 DCHECK_EQ(LeaveCC, i.OutputSBit());
1254 break; 1265 break;
1255 } 1266 }
1256 case kArmVcvtS32F32: { 1267 case kArmVcvtS32F32: {
1257 SwVfpRegister scratch = kScratchDoubleReg.low(); 1268 SwVfpRegister scratch = kScratchDoubleReg.low();
1258 __ vcvt_s32_f32(scratch, i.InputFloatRegister(0)); 1269 __ vcvt_s32_f32(scratch, i.InputFloat32Register(0));
1259 __ vmov(i.OutputRegister(), scratch); 1270 __ vmov(i.OutputRegister(), scratch);
1260 // Avoid INT32_MAX as an overflow indicator and use INT32_MIN instead, 1271 // Avoid INT32_MAX as an overflow indicator and use INT32_MIN instead,
1261 // because INT32_MIN allows easier out-of-bounds detection. 1272 // because INT32_MIN allows easier out-of-bounds detection.
1262 __ cmn(i.OutputRegister(), Operand(1)); 1273 __ cmn(i.OutputRegister(), Operand(1));
1263 __ mov(i.OutputRegister(), Operand(INT32_MIN), SBit::LeaveCC, vs); 1274 __ mov(i.OutputRegister(), Operand(INT32_MIN), SBit::LeaveCC, vs);
1264 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1275 DCHECK_EQ(LeaveCC, i.OutputSBit());
1265 break; 1276 break;
1266 } 1277 }
1267 case kArmVcvtU32F32: { 1278 case kArmVcvtU32F32: {
1268 SwVfpRegister scratch = kScratchDoubleReg.low(); 1279 SwVfpRegister scratch = kScratchDoubleReg.low();
1269 __ vcvt_u32_f32(scratch, i.InputFloatRegister(0)); 1280 __ vcvt_u32_f32(scratch, i.InputFloat32Register(0));
1270 __ vmov(i.OutputRegister(), scratch); 1281 __ vmov(i.OutputRegister(), scratch);
1271 // Avoid UINT32_MAX as an overflow indicator and use 0 instead, 1282 // Avoid UINT32_MAX as an overflow indicator and use 0 instead,
1272 // because 0 allows easier out-of-bounds detection. 1283 // because 0 allows easier out-of-bounds detection.
1273 __ cmn(i.OutputRegister(), Operand(1)); 1284 __ cmn(i.OutputRegister(), Operand(1));
1274 __ adc(i.OutputRegister(), i.OutputRegister(), Operand::Zero()); 1285 __ adc(i.OutputRegister(), i.OutputRegister(), Operand::Zero());
1275 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1286 DCHECK_EQ(LeaveCC, i.OutputSBit());
1276 break; 1287 break;
1277 } 1288 }
1278 case kArmVcvtS32F64: { 1289 case kArmVcvtS32F64: {
1279 SwVfpRegister scratch = kScratchDoubleReg.low(); 1290 SwVfpRegister scratch = kScratchDoubleReg.low();
1280 __ vcvt_s32_f64(scratch, i.InputDoubleRegister(0)); 1291 __ vcvt_s32_f64(scratch, i.InputDoubleRegister(0));
1281 __ vmov(i.OutputRegister(), scratch); 1292 __ vmov(i.OutputRegister(), scratch);
1282 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1293 DCHECK_EQ(LeaveCC, i.OutputSBit());
1283 break; 1294 break;
1284 } 1295 }
1285 case kArmVcvtU32F64: { 1296 case kArmVcvtU32F64: {
1286 SwVfpRegister scratch = kScratchDoubleReg.low(); 1297 SwVfpRegister scratch = kScratchDoubleReg.low();
1287 __ vcvt_u32_f64(scratch, i.InputDoubleRegister(0)); 1298 __ vcvt_u32_f64(scratch, i.InputDoubleRegister(0));
1288 __ vmov(i.OutputRegister(), scratch); 1299 __ vmov(i.OutputRegister(), scratch);
1289 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1300 DCHECK_EQ(LeaveCC, i.OutputSBit());
1290 break; 1301 break;
1291 } 1302 }
1292 case kArmVmovU32F32: 1303 case kArmVmovU32F32:
1293 __ vmov(i.OutputRegister(), i.InputFloatRegister(0)); 1304 __ vmov(i.OutputRegister(), i.InputFloat32Register(0));
1294 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1305 DCHECK_EQ(LeaveCC, i.OutputSBit());
1295 break; 1306 break;
1296 case kArmVmovF32U32: 1307 case kArmVmovF32U32:
1297 __ vmov(i.OutputFloatRegister(), i.InputRegister(0)); 1308 __ vmov(i.OutputFloat32Register(), i.InputRegister(0));
1298 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1309 DCHECK_EQ(LeaveCC, i.OutputSBit());
1299 break; 1310 break;
1300 case kArmVmovLowU32F64: 1311 case kArmVmovLowU32F64:
1301 __ VmovLow(i.OutputRegister(), i.InputDoubleRegister(0)); 1312 __ VmovLow(i.OutputRegister(), i.InputDoubleRegister(0));
1302 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1313 DCHECK_EQ(LeaveCC, i.OutputSBit());
1303 break; 1314 break;
1304 case kArmVmovLowF64U32: 1315 case kArmVmovLowF64U32:
1305 __ VmovLow(i.OutputDoubleRegister(), i.InputRegister(1)); 1316 __ VmovLow(i.OutputDoubleRegister(), i.InputRegister(1));
1306 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1317 DCHECK_EQ(LeaveCC, i.OutputSBit());
1307 break; 1318 break;
(...skipping 32 matching lines...)
1340 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1351 DCHECK_EQ(LeaveCC, i.OutputSBit());
1341 break; 1352 break;
1342 case kArmLdr: 1353 case kArmLdr:
1343 __ ldr(i.OutputRegister(), i.InputOffset()); 1354 __ ldr(i.OutputRegister(), i.InputOffset());
1344 break; 1355 break;
1345 case kArmStr: 1356 case kArmStr:
1346 __ str(i.InputRegister(0), i.InputOffset(1)); 1357 __ str(i.InputRegister(0), i.InputOffset(1));
1347 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1358 DCHECK_EQ(LeaveCC, i.OutputSBit());
1348 break; 1359 break;
1349 case kArmVldrF32: { 1360 case kArmVldrF32: {
1350 __ vldr(i.OutputFloatRegister(), i.InputOffset()); 1361 __ vldr(i.OutputFloat32Register(), i.InputOffset());
1351 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1362 DCHECK_EQ(LeaveCC, i.OutputSBit());
1352 break; 1363 break;
1353 } 1364 }
1354 case kArmVstrF32: 1365 case kArmVstrF32:
1355 __ vstr(i.InputFloatRegister(0), i.InputOffset(1)); 1366 __ vstr(i.InputFloat32Register(0), i.InputOffset(1));
1356 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1367 DCHECK_EQ(LeaveCC, i.OutputSBit());
1357 break; 1368 break;
1358 case kArmVldrF64: 1369 case kArmVldrF64:
1359 __ vldr(i.OutputDoubleRegister(), i.InputOffset()); 1370 __ vldr(i.OutputDoubleRegister(), i.InputOffset());
1360 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1371 DCHECK_EQ(LeaveCC, i.OutputSBit());
1361 break; 1372 break;
1362 case kArmVstrF64: 1373 case kArmVstrF64:
1363 __ vstr(i.InputDoubleRegister(0), i.InputOffset(1)); 1374 __ vstr(i.InputDoubleRegister(0), i.InputOffset(1));
1364 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1375 DCHECK_EQ(LeaveCC, i.OutputSBit());
1365 break; 1376 break;
(...skipping 75 matching lines...)
1441 break; 1452 break;
1442 } 1453 }
1443 case kArmPush: 1454 case kArmPush:
1444 if (instr->InputAt(0)->IsFPRegister()) { 1455 if (instr->InputAt(0)->IsFPRegister()) {
1445 LocationOperand* op = LocationOperand::cast(instr->InputAt(0)); 1456 LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
1446 if (op->representation() == MachineRepresentation::kFloat64) { 1457 if (op->representation() == MachineRepresentation::kFloat64) {
1447 __ vpush(i.InputDoubleRegister(0)); 1458 __ vpush(i.InputDoubleRegister(0));
1448 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize); 1459 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1449 } else { 1460 } else {
1450 DCHECK_EQ(MachineRepresentation::kFloat32, op->representation()); 1461 DCHECK_EQ(MachineRepresentation::kFloat32, op->representation());
1451 __ vpush(i.InputFloatRegister(0)); 1462 __ vpush(i.InputFloat32Register(0));
1452 frame_access_state()->IncreaseSPDelta(1); 1463 frame_access_state()->IncreaseSPDelta(1);
1453 } 1464 }
1454 } else { 1465 } else {
1455 __ push(i.InputRegister(0)); 1466 __ push(i.InputRegister(0));
1456 frame_access_state()->IncreaseSPDelta(1); 1467 frame_access_state()->IncreaseSPDelta(1);
1457 } 1468 }
1458 DCHECK_EQ(LeaveCC, i.OutputSBit()); 1469 DCHECK_EQ(LeaveCC, i.OutputSBit());
1459 break; 1470 break;
1460 case kArmPoke: { 1471 case kArmPoke: {
1461 int const slot = MiscField::decode(instr->opcode()); 1472 int const slot = MiscField::decode(instr->opcode());
(...skipping 10 matching lines...)
1472 case kCheckedLoadInt16: 1483 case kCheckedLoadInt16:
1473 ASSEMBLE_CHECKED_LOAD_INTEGER(ldrsh); 1484 ASSEMBLE_CHECKED_LOAD_INTEGER(ldrsh);
1474 break; 1485 break;
1475 case kCheckedLoadUint16: 1486 case kCheckedLoadUint16:
1476 ASSEMBLE_CHECKED_LOAD_INTEGER(ldrh); 1487 ASSEMBLE_CHECKED_LOAD_INTEGER(ldrh);
1477 break; 1488 break;
1478 case kCheckedLoadWord32: 1489 case kCheckedLoadWord32:
1479 ASSEMBLE_CHECKED_LOAD_INTEGER(ldr); 1490 ASSEMBLE_CHECKED_LOAD_INTEGER(ldr);
1480 break; 1491 break;
1481 case kCheckedLoadFloat32: 1492 case kCheckedLoadFloat32:
1482 ASSEMBLE_CHECKED_LOAD_FP(Float); 1493 ASSEMBLE_CHECKED_LOAD_FP(Float32);
1483 break; 1494 break;
1484 case kCheckedLoadFloat64: 1495 case kCheckedLoadFloat64:
1485 ASSEMBLE_CHECKED_LOAD_FP(Double); 1496 ASSEMBLE_CHECKED_LOAD_FP(Double);
1486 break; 1497 break;
1487 case kCheckedStoreWord8: 1498 case kCheckedStoreWord8:
1488 ASSEMBLE_CHECKED_STORE_INTEGER(strb); 1499 ASSEMBLE_CHECKED_STORE_INTEGER(strb);
1489 break; 1500 break;
1490 case kCheckedStoreWord16: 1501 case kCheckedStoreWord16:
1491 ASSEMBLE_CHECKED_STORE_INTEGER(strh); 1502 ASSEMBLE_CHECKED_STORE_INTEGER(strh);
1492 break; 1503 break;
1493 case kCheckedStoreWord32: 1504 case kCheckedStoreWord32:
1494 ASSEMBLE_CHECKED_STORE_INTEGER(str); 1505 ASSEMBLE_CHECKED_STORE_INTEGER(str);
1495 break; 1506 break;
1496 case kCheckedStoreFloat32: 1507 case kCheckedStoreFloat32:
1497 ASSEMBLE_CHECKED_STORE_FP(Float); 1508 ASSEMBLE_CHECKED_STORE_FP(Float32);
1498 break; 1509 break;
1499 case kCheckedStoreFloat64: 1510 case kCheckedStoreFloat64:
1500 ASSEMBLE_CHECKED_STORE_FP(Double); 1511 ASSEMBLE_CHECKED_STORE_FP(Double);
1501 break; 1512 break;
1502 case kCheckedLoadWord64: 1513 case kCheckedLoadWord64:
1503 case kCheckedStoreWord64: 1514 case kCheckedStoreWord64:
1504 UNREACHABLE(); // currently unsupported checked int64 load/store. 1515 UNREACHABLE(); // currently unsupported checked int64 load/store.
1505 break; 1516 break;
1506 1517
1507 case kAtomicLoadInt8: 1518 case kAtomicLoadInt8:
(...skipping 297 matching lines...)
1805 UNREACHABLE(); // TODO(dcarney): loading RPO constants on arm. 1816 UNREACHABLE(); // TODO(dcarney): loading RPO constants on arm.
1806 break; 1817 break;
1807 } 1818 }
1808 if (destination->IsStackSlot()) __ str(dst, g.ToMemOperand(destination)); 1819 if (destination->IsStackSlot()) __ str(dst, g.ToMemOperand(destination));
1809 } else if (src.type() == Constant::kFloat32) { 1820 } else if (src.type() == Constant::kFloat32) {
1810 if (destination->IsFPStackSlot()) { 1821 if (destination->IsFPStackSlot()) {
1811 MemOperand dst = g.ToMemOperand(destination); 1822 MemOperand dst = g.ToMemOperand(destination);
1812 __ mov(ip, Operand(bit_cast<int32_t>(src.ToFloat32()))); 1823 __ mov(ip, Operand(bit_cast<int32_t>(src.ToFloat32())));
1813 __ str(ip, dst); 1824 __ str(ip, dst);
1814 } else { 1825 } else {
1815 SwVfpRegister dst = g.ToFloatRegister(destination); 1826 SwVfpRegister dst = g.ToFloat32Register(destination);
1816 __ vmov(dst, src.ToFloat32()); 1827 __ vmov(dst, src.ToFloat32());
1817 } 1828 }
1818 } else { 1829 } else {
1819 DCHECK_EQ(Constant::kFloat64, src.type()); 1830 DCHECK_EQ(Constant::kFloat64, src.type());
1820 DwVfpRegister dst = destination->IsFPRegister() 1831 DwVfpRegister dst = destination->IsFPRegister()
1821 ? g.ToDoubleRegister(destination) 1832 ? g.ToDoubleRegister(destination)
1822 : kScratchDoubleReg; 1833 : kScratchDoubleReg;
1823 __ vmov(dst, src.ToFloat64(), kScratchReg); 1834 __ vmov(dst, src.ToFloat64(), kScratchReg);
1824 if (destination->IsFPStackSlot()) { 1835 if (destination->IsFPStackSlot()) {
1825 __ vstr(dst, g.ToMemOperand(destination)); 1836 __ vstr(dst, g.ToMemOperand(destination));
1826 } 1837 }
1827 } 1838 }
1828 } else if (source->IsFPRegister()) { 1839 } else if (source->IsFPRegister()) {
1829 MachineRepresentation rep = LocationOperand::cast(source)->representation(); 1840 DwVfpRegister src = g.ToDoubleRegister(source);
1830 if (rep == MachineRepresentation::kFloat64) { 1841 if (destination->IsFPRegister()) {
1831 DwVfpRegister src = g.ToDoubleRegister(source); 1842 DwVfpRegister dst = g.ToDoubleRegister(destination);
1832 if (destination->IsFPRegister()) { 1843 __ Move(dst, src);
1833 DwVfpRegister dst = g.ToDoubleRegister(destination);
1834 __ Move(dst, src);
1835 } else {
1836 DCHECK(destination->IsFPStackSlot());
1837 __ vstr(src, g.ToMemOperand(destination));
1838 }
1839 } else { 1844 } else {
1840 DCHECK_EQ(MachineRepresentation::kFloat32, rep); 1845 DCHECK(destination->IsFPStackSlot());
1841 SwVfpRegister src = g.ToFloatRegister(source); 1846 __ vstr(src, g.ToMemOperand(destination));
1842 if (destination->IsFPRegister()) {
1843 SwVfpRegister dst = g.ToFloatRegister(destination);
1844 __ Move(dst, src);
1845 } else {
1846 DCHECK(destination->IsFPStackSlot());
1847 __ vstr(src, g.ToMemOperand(destination));
1848 }
1849 } 1847 }
1850 } else if (source->IsFPStackSlot()) { 1848 } else if (source->IsFPStackSlot()) {
1851 MemOperand src = g.ToMemOperand(source); 1849 MemOperand src = g.ToMemOperand(source);
1852 MachineRepresentation rep =
1853 LocationOperand::cast(destination)->representation();
1854 if (destination->IsFPRegister()) { 1850 if (destination->IsFPRegister()) {
1855 if (rep == MachineRepresentation::kFloat64) {
1856 __ vldr(g.ToDoubleRegister(destination), src); 1851 __ vldr(g.ToDoubleRegister(destination), src);
1857 } else {
1858 DCHECK_EQ(MachineRepresentation::kFloat32, rep);
1859 __ vldr(g.ToFloatRegister(destination), src);
1860 }
1861 } else { 1852 } else {
1862 DCHECK(destination->IsFPStackSlot()); 1853 DCHECK(destination->IsFPStackSlot());
1863 if (rep == MachineRepresentation::kFloat64) {
1864 DwVfpRegister temp = kScratchDoubleReg; 1854 DwVfpRegister temp = kScratchDoubleReg;
1865 __ vldr(temp, src); 1855 __ vldr(temp, src);
1866 __ vstr(temp, g.ToMemOperand(destination)); 1856 __ vstr(temp, g.ToMemOperand(destination));
1867 } else {
1868 DCHECK_EQ(MachineRepresentation::kFloat32, rep);
1869 SwVfpRegister temp = kScratchDoubleReg.low();
1870 __ vldr(temp, src);
1871 __ vstr(temp, g.ToMemOperand(destination));
1872 }
1873 } 1857 }
1874 } else { 1858 } else {
1875 UNREACHABLE(); 1859 UNREACHABLE();
1876 } 1860 }
1877 } 1861 }
1878 1862
1879 1863
1880 void CodeGenerator::AssembleSwap(InstructionOperand* source, 1864 void CodeGenerator::AssembleSwap(InstructionOperand* source,
1881 InstructionOperand* destination) { 1865 InstructionOperand* destination) {
1882 ArmOperandConverter g(this, nullptr); 1866 ArmOperandConverter g(this, nullptr);
(...skipping 19 matching lines...)
1902 DCHECK(destination->IsStackSlot()); 1886 DCHECK(destination->IsStackSlot());
1903 Register temp_0 = kScratchReg; 1887 Register temp_0 = kScratchReg;
1904 SwVfpRegister temp_1 = kScratchDoubleReg.low(); 1888 SwVfpRegister temp_1 = kScratchDoubleReg.low();
1905 MemOperand src = g.ToMemOperand(source); 1889 MemOperand src = g.ToMemOperand(source);
1906 MemOperand dst = g.ToMemOperand(destination); 1890 MemOperand dst = g.ToMemOperand(destination);
1907 __ ldr(temp_0, src); 1891 __ ldr(temp_0, src);
1908 __ vldr(temp_1, dst); 1892 __ vldr(temp_1, dst);
1909 __ str(temp_0, dst); 1893 __ str(temp_0, dst);
1910 __ vstr(temp_1, src); 1894 __ vstr(temp_1, src);
1911 } else if (source->IsFPRegister()) { 1895 } else if (source->IsFPRegister()) {
1912 MachineRepresentation rep = LocationOperand::cast(source)->representation();
1913 LowDwVfpRegister temp = kScratchDoubleReg; 1896 LowDwVfpRegister temp = kScratchDoubleReg;
1914 if (rep == MachineRepresentation::kFloat64) {
1915 DwVfpRegister src = g.ToDoubleRegister(source); 1897 DwVfpRegister src = g.ToDoubleRegister(source);
1916 if (destination->IsFPRegister()) { 1898 if (destination->IsFPRegister()) {
1917 DwVfpRegister dst = g.ToDoubleRegister(destination); 1899 DwVfpRegister dst = g.ToDoubleRegister(destination);
1918 __ Move(temp, src); 1900 __ Move(temp, src);
1919 __ Move(src, dst); 1901 __ Move(src, dst);
1920 __ Move(dst, temp); 1902 __ Move(dst, temp);
1921 } else { 1903 } else {
1922 DCHECK(destination->IsFPStackSlot()); 1904 DCHECK(destination->IsFPStackSlot());
1923 MemOperand dst = g.ToMemOperand(destination); 1905 MemOperand dst = g.ToMemOperand(destination);
1924 __ Move(temp, src); 1906 __ Move(temp, src);
1925 __ vldr(src, dst); 1907 __ vldr(src, dst);
1926 __ vstr(temp, dst); 1908 __ vstr(temp, dst);
1927 } 1909 }
1928 } else {
1929 DCHECK_EQ(MachineRepresentation::kFloat32, rep);
1930 SwVfpRegister src = g.ToFloatRegister(source);
1931 if (destination->IsFPRegister()) {
1932 SwVfpRegister dst = g.ToFloatRegister(destination);
1933 __ Move(temp.low(), src);
1934 __ Move(src, dst);
1935 __ Move(dst, temp.low());
1936 } else {
1937 DCHECK(destination->IsFPStackSlot());
1938 MemOperand dst = g.ToMemOperand(destination);
1939 __ Move(temp.low(), src);
1940 __ vldr(src, dst);
1941 __ vstr(temp.low(), dst);
1942 }
1943 }
1944 } else if (source->IsFPStackSlot()) { 1910 } else if (source->IsFPStackSlot()) {
1945 DCHECK(destination->IsFPStackSlot()); 1911 DCHECK(destination->IsFPStackSlot());
1946 Register temp_0 = kScratchReg; 1912 Register temp_0 = kScratchReg;
1947 LowDwVfpRegister temp_1 = kScratchDoubleReg; 1913 LowDwVfpRegister temp_1 = kScratchDoubleReg;
1948 MemOperand src0 = g.ToMemOperand(source); 1914 MemOperand src0 = g.ToMemOperand(source);
1949 MemOperand dst0 = g.ToMemOperand(destination); 1915 MemOperand dst0 = g.ToMemOperand(destination);
1950 MachineRepresentation rep = LocationOperand::cast(source)->representation();
1951 if (rep == MachineRepresentation::kFloat64) {
1952 MemOperand src1(src0.rn(), src0.offset() + kPointerSize); 1916 MemOperand src1(src0.rn(), src0.offset() + kPointerSize);
1953 MemOperand dst1(dst0.rn(), dst0.offset() + kPointerSize); 1917 MemOperand dst1(dst0.rn(), dst0.offset() + kPointerSize);
1954 __ vldr(temp_1, dst0); // Save destination in temp_1. 1918 __ vldr(temp_1, dst0); // Save destination in temp_1.
1955 __ ldr(temp_0, src0); // Then use temp_0 to copy source to destination. 1919 __ ldr(temp_0, src0); // Then use temp_0 to copy source to destination.
1956 __ str(temp_0, dst0); 1920 __ str(temp_0, dst0);
1957 __ ldr(temp_0, src1); 1921 __ ldr(temp_0, src1);
1958 __ str(temp_0, dst1); 1922 __ str(temp_0, dst1);
1959 __ vstr(temp_1, src0); 1923 __ vstr(temp_1, src0);
1960 } else {
1961 DCHECK_EQ(MachineRepresentation::kFloat32, rep);
1962 __ vldr(temp_1.low(), dst0); // Save destination in temp_1.
1963 __ ldr(temp_0, src0); // Then use temp_0 to copy source to destination.
1964 __ str(temp_0, dst0);
1965 __ vstr(temp_1.low(), src0);
1966 }
1967 } else { 1924 } else {
1968 // No other combinations are possible. 1925 // No other combinations are possible.
1969 UNREACHABLE(); 1926 UNREACHABLE();
1970 } 1927 }
1971 } 1928 }
1972 1929
1973 1930
1974 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) { 1931 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
1975 // On 32-bit ARM we emit the jump tables inline. 1932 // On 32-bit ARM we emit the jump tables inline.
1976 UNREACHABLE(); 1933 UNREACHABLE();
(...skipping 19 matching lines...)
1996 padding_size -= v8::internal::Assembler::kInstrSize; 1953 padding_size -= v8::internal::Assembler::kInstrSize;
1997 } 1954 }
1998 } 1955 }
1999 } 1956 }
2000 1957
2001 #undef __ 1958 #undef __
2002 1959
2003 } // namespace compiler 1960 } // namespace compiler
2004 } // namespace internal 1961 } // namespace internal
2005 } // namespace v8 1962 } // namespace v8
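
A note on the float-to-integer conversion cases above (kArmVcvtS32F32 / kArmVcvtU32F32): vcvt saturates out-of-range inputs to INT32_MAX / UINT32_MAX, and the emitted cmn/mov and cmn/adc sequences remap those saturation values to INT32_MIN / 0 so a later bounds check only has to test one sentinel. A rough C++ restatement of that post-processing, assuming vcvt's saturating behaviour; the helper names are illustrative, not V8 API:

#include <cstdint>
#include <limits>

// Illustrative restatement (not V8 code) of the overflow-indicator remapping
// emitted after vcvt in the kArmVcvtS32F32 / kArmVcvtU32F32 cases.
int32_t RemapSignedOverflow(int32_t converted) {
  // cmn r, #1 sets the V flag exactly when r == INT32_MAX;
  // the conditional mov then installs INT32_MIN.
  return converted == std::numeric_limits<int32_t>::max()
             ? std::numeric_limits<int32_t>::min()
             : converted;
}

uint32_t RemapUnsignedOverflow(uint32_t converted) {
  // cmn r, #1 sets the carry flag exactly when r == UINT32_MAX;
  // adc r, r, #0 then wraps the result around to 0.
  return converted == std::numeric_limits<uint32_t>::max() ? 0u : converted;
}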