OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/compiler/instruction-selector-impl.h" | 5 #include "src/compiler/instruction-selector-impl.h" |
6 #include "src/compiler/node-matchers.h" | 6 #include "src/compiler/node-matchers.h" |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 namespace compiler { | 10 namespace compiler { |
(...skipping 249 matching lines...)
260 if (g.CanBeImmediate(index, immediate_mode)) { | 260 if (g.CanBeImmediate(index, immediate_mode)) { |
261 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, | 261 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, |
262 g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value)); | 262 g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value)); |
263 } else { | 263 } else { |
264 Emit(opcode | AddressingModeField::encode(kMode_MRR), NULL, | 264 Emit(opcode | AddressingModeField::encode(kMode_MRR), NULL, |
265 g.UseRegister(base), g.UseRegister(index), g.UseRegister(value)); | 265 g.UseRegister(base), g.UseRegister(index), g.UseRegister(value)); |
266 } | 266 } |
267 } | 267 } |
268 | 268 |
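For context on the hunk above: the index operand decides the store's addressing mode. When the index fits the opcode's immediate range (CanBeImmediate), the store is emitted as base register plus immediate offset (kMode_MRI); otherwise both base and index stay in registers (kMode_MRR). A minimal sketch of the A64 forms each mode corresponds to, assuming standard load/store encodings (illustration only, not part of this patch):

    // kMode_MRI: base register + immediate offset
    //   str w2, [x0, #imm]
    // kMode_MRR: base register + register index
    //   str w2, [x0, x1]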
269 | 269 |
| 270 template <typename Matcher> |
| 271 static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m, |
| 272 ArchOpcode opcode, bool left_can_cover, |
| 273 bool right_can_cover, ImmediateMode imm_mode) { |
| 274 Arm64OperandGenerator g(selector); |
| 275 |
| 276 // Map instruction to equivalent operation with inverted right input. |
| 277 ArchOpcode inv_opcode = opcode; |
| 278 switch (opcode) { |
| 279 case kArm64And32: |
| 280 inv_opcode = kArm64Bic32; |
| 281 break; |
| 282 case kArm64And: |
| 283 inv_opcode = kArm64Bic; |
| 284 break; |
| 285 case kArm64Or32: |
| 286 inv_opcode = kArm64Orn32; |
| 287 break; |
| 288 case kArm64Or: |
| 289 inv_opcode = kArm64Orn; |
| 290 break; |
| 291 case kArm64Eor32: |
| 292 inv_opcode = kArm64Eon32; |
| 293 break; |
| 294 case kArm64Eor: |
| 295 inv_opcode = kArm64Eon; |
| 296 break; |
| 297 default: |
| 298 UNREACHABLE(); |
| 299 } |
| 300 |
| 301 // Select Logical(y, ~x) for Logical(Xor(x, -1), y). |
| 302 if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) { |
| 303 Matcher mleft(m->left().node()); |
| 304 if (mleft.right().Is(-1)) { |
| 305 // TODO(all): support shifted operand on right. |
| 306 selector->Emit(inv_opcode, g.DefineAsRegister(node), |
| 307 g.UseRegister(m->right().node()), |
| 308 g.UseRegister(mleft.left().node())); |
| 309 return; |
| 310 } |
| 311 } |
| 312 |
| 313 // Select Logical(x, ~y) for Logical(x, Xor(y, -1)). |
| 314 if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) && |
| 315 right_can_cover) { |
| 316 Matcher mright(m->right().node()); |
| 317 if (mright.right().Is(-1)) { |
| 318 // TODO(all): support shifted operand on right. |
| 319 selector->Emit(inv_opcode, g.DefineAsRegister(node), |
| 320 g.UseRegister(m->left().node()), |
| 321 g.UseRegister(mright.left().node())); |
| 322 return; |
| 323 } |
| 324 } |
| 325 |
| 326 if (m->IsWord32Xor() && m->right().Is(-1)) { |
| 327 selector->Emit(kArm64Not32, g.DefineAsRegister(node), |
| 328 g.UseRegister(m->left().node())); |
| 329 } else if (m->IsWord64Xor() && m->right().Is(-1)) { |
| 330 selector->Emit(kArm64Not, g.DefineAsRegister(node), |
| 331 g.UseRegister(m->left().node())); |
| 332 } else { |
| 333 VisitBinop<Matcher>(selector, node, opcode, imm_mode); |
| 334 } |
| 335 } |
| 336 |
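For context: VisitLogical maps each plain logical opcode to its negated-right-operand counterpart (and -> bic, or -> orn, eor -> eon) and fires when either input is a covered Xor with -1, i.e. a bitwise NOT. A hedged sketch of the source patterns this lets the selector fold, assuming standard A64 semantics (illustration only; the function names are hypothetical):

    #include <cstdint>

    // Each body should select to a single ARM64 logical instruction
    // after this change.
    int32_t f(int32_t a, int32_t b) { return a & ~b; }  // bic (kArm64Bic32)
    int32_t g(int32_t a, int32_t b) { return a | ~b; }  // orn (kArm64Orn32)
    int32_t h(int32_t a, int32_t b) { return a ^ ~b; }  // eon (kArm64Eon32)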
| 337 |
270 void InstructionSelector::VisitWord32And(Node* node) { | 338 void InstructionSelector::VisitWord32And(Node* node) { |
271 VisitBinop<Int32BinopMatcher>(this, node, kArm64And32, kLogical32Imm); | 339 Int32BinopMatcher m(node); |
| 340 VisitLogical<Int32BinopMatcher>( |
| 341 this, node, &m, kArm64And32, CanCover(node, m.left().node()), |
| 342 CanCover(node, m.right().node()), kLogical32Imm); |
272 } | 343 } |
273 | 344 |
274 | 345 |
275 void InstructionSelector::VisitWord64And(Node* node) { | 346 void InstructionSelector::VisitWord64And(Node* node) { |
276 VisitBinop<Int64BinopMatcher>(this, node, kArm64And, kLogical64Imm); | 347 Int64BinopMatcher m(node); |
| 348 VisitLogical<Int64BinopMatcher>( |
| 349 this, node, &m, kArm64And, CanCover(node, m.left().node()), |
| 350 CanCover(node, m.right().node()), kLogical64Imm); |
277 } | 351 } |
278 | 352 |
279 | 353 |
280 void InstructionSelector::VisitWord32Or(Node* node) { | 354 void InstructionSelector::VisitWord32Or(Node* node) { |
281 VisitBinop<Int32BinopMatcher>(this, node, kArm64Or32, kLogical32Imm); | 355 Int32BinopMatcher m(node); |
| 356 VisitLogical<Int32BinopMatcher>( |
| 357 this, node, &m, kArm64Or32, CanCover(node, m.left().node()), |
| 358 CanCover(node, m.right().node()), kLogical32Imm); |
282 } | 359 } |
283 | 360 |
284 | 361 |
285 void InstructionSelector::VisitWord64Or(Node* node) { | 362 void InstructionSelector::VisitWord64Or(Node* node) { |
286 VisitBinop<Int64BinopMatcher>(this, node, kArm64Or, kLogical64Imm); | 363 Int64BinopMatcher m(node); |
| 364 VisitLogical<Int64BinopMatcher>( |
| 365 this, node, &m, kArm64Or, CanCover(node, m.left().node()), |
| 366 CanCover(node, m.right().node()), kLogical64Imm); |
287 } | 367 } |
288 | 368 |
289 | 369 |
290 void InstructionSelector::VisitWord32Xor(Node* node) { | 370 void InstructionSelector::VisitWord32Xor(Node* node) { |
291 Arm64OperandGenerator g(this); | |
292 Int32BinopMatcher m(node); | 371 Int32BinopMatcher m(node); |
293 if (m.right().Is(-1)) { | 372 VisitLogical<Int32BinopMatcher>( |
294 Emit(kArm64Not32, g.DefineAsRegister(node), g.UseRegister(m.left().node())); | 373 this, node, &m, kArm64Eor32, CanCover(node, m.left().node()), |
295 } else { | 374 CanCover(node, m.right().node()), kLogical32Imm); |
296 VisitBinop<Int32BinopMatcher>(this, node, kArm64Xor32, kLogical32Imm); | |
297 } | |
298 } | 375 } |
299 | 376 |
300 | 377 |
301 void InstructionSelector::VisitWord64Xor(Node* node) { | 378 void InstructionSelector::VisitWord64Xor(Node* node) { |
302 Arm64OperandGenerator g(this); | |
303 Int64BinopMatcher m(node); | 379 Int64BinopMatcher m(node); |
304 if (m.right().Is(-1)) { | 380 VisitLogical<Int64BinopMatcher>( |
305 Emit(kArm64Not, g.DefineAsRegister(node), g.UseRegister(m.left().node())); | 381 this, node, &m, kArm64Eor, CanCover(node, m.left().node()), |
306 } else { | 382 CanCover(node, m.right().node()), kLogical64Imm); |
307 VisitBinop<Int64BinopMatcher>(this, node, kArm64Xor, kLogical32Imm); | |
308 } | |
309 } | 383 } |
310 | 384 |
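For context: the Xor visitors no longer special-case Xor(x, -1) themselves; VisitLogical carries that selection and still emits kArm64Not32/kArm64Not (mvn) when the whole node is a NOT. A minimal illustration, assuming standard A64 semantics (not part of this patch):

    #include <cstdint>

    int32_t inv(int32_t x) { return ~x; }  // Word32Xor(x, -1) -> mvn w0, w1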
311 | 385 |
312 void InstructionSelector::VisitWord32Shl(Node* node) { | 386 void InstructionSelector::VisitWord32Shl(Node* node) { |
313 VisitRRO(this, kArm64Shl32, node, kShift32Imm); | 387 VisitRRO(this, kArm64Shl32, node, kShift32Imm); |
314 } | 388 } |
315 | 389 |
316 | 390 |
317 void InstructionSelector::VisitWord64Shl(Node* node) { | 391 void InstructionSelector::VisitWord64Shl(Node* node) { |
318 VisitRRO(this, kArm64Shl, node, kShift64Imm); | 392 VisitRRO(this, kArm64Shl, node, kShift64Imm); |
(...skipping 362 matching lines...)
681 call_instr->MarkAsCall(); | 755 call_instr->MarkAsCall(); |
682 if (deoptimization != NULL) { | 756 if (deoptimization != NULL) { |
683 DCHECK(continuation != NULL); | 757 DCHECK(continuation != NULL); |
684 call_instr->MarkAsControl(); | 758 call_instr->MarkAsControl(); |
685 } | 759 } |
686 } | 760 } |
687 | 761 |
688 } // namespace compiler | 762 } // namespace compiler |
689 } // namespace internal | 763 } // namespace internal |
690 } // namespace v8 | 764 } // namespace v8 |