 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/compiler/instruction-selector-impl.h"
 #include "src/compiler/node-matchers.h"

 namespace v8 {
 namespace internal {
 namespace compiler {
(...skipping 337 matching lines...)
   } else if (m->IsWord64Xor() && m->right().Is(-1)) {
     selector->Emit(kArm64Not, g.DefineAsRegister(node),
                    g.UseRegister(m->left().node()));
   } else {
     VisitBinop<Matcher>(selector, node, opcode, imm_mode);
   }
 }
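The `Is(-1)` arm above folds a trailing `Xor(x, -1)` into a single `kArm64Not`, relying on the fact that xor with an all-ones word flips every bit. A minimal standalone check of that identity (illustrative only, not part of this patch):

  #include <cstdint>

  constexpr uint64_t kSample = 0x0123456789ABCDEFull;
  static_assert((kSample ^ static_cast<uint64_t>(-1)) == ~kSample,
                "xor with -1 (all bits set) is a bitwise NOT");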


 void InstructionSelector::VisitWord32And(Node* node) {
+  Arm64OperandGenerator g(this);
   Int32BinopMatcher m(node);
+  if (m.left().IsWord32Shr() && CanCover(node, m.left().node()) &&
+      m.right().HasValue()) {
+    uint32_t mask = m.right().Value();
+    uint32_t mask_width = base::bits::CountPopulation32(mask);
+    uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
+    if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
+      // The mask must be contiguous, and occupy the least-significant bits.
+      DCHECK_EQ(0, base::bits::CountTrailingZeros32(mask));
+
+      // Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
+      // significant bits.
+      Int32BinopMatcher mleft(m.left().node());
+      if (mleft.right().IsInRange(0, 31)) {
+        Emit(kArm64Ubfx32, g.DefineAsRegister(node),
+             g.UseRegister(mleft.left().node()),
+             g.UseImmediate(mleft.right().node()), g.TempImmediate(mask_width));
+        return;
+      }
+      // Other cases fall through to the normal And operation.
+    }
+  }
   VisitLogical<Int32BinopMatcher>(
       this, node, &m, kArm64And32, CanCover(node, m.left().node()),
       CanCover(node, m.right().node()), kLogical32Imm);
 }
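The new `And(Shr(x, imm), mask)` match above only fires when `mask` is a non-empty contiguous run of ones anchored at bit 0, which is what the popcount/leading-zeros test checks. A standalone sketch of the same test using C++20 `<bit>` (names are illustrative; the patch itself uses V8's `base::bits` helpers):

  #include <bit>
  #include <cstdint>

  // True iff `mask` is a non-empty run of ones occupying the lowest bits,
  // e.g. 0x000000FF or 0x00000007, but not 0x000000F0 or 0x00000F0F.
  bool IsContiguousLowMask32(uint32_t mask) {
    int width = std::popcount(mask);   // number of set bits
    int msb = std::countl_zero(mask);  // zeros above the highest set bit
    // Leading zeros plus set bits span the whole word only when the ones
    // form a single run that reaches down to bit 0.
    return width != 0 && msb + width == 32;
  }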


 void InstructionSelector::VisitWord64And(Node* node) {
+  Arm64OperandGenerator g(this);
   Int64BinopMatcher m(node);
+  if (m.left().IsWord64Shr() && CanCover(node, m.left().node()) &&
+      m.right().HasValue()) {
+    uint64_t mask = m.right().Value();
+    uint64_t mask_width = base::bits::CountPopulation64(mask);
+    uint64_t mask_msb = base::bits::CountLeadingZeros64(mask);
+    if ((mask_width != 0) && (mask_msb + mask_width == 64)) {
+      // The mask must be contiguous, and occupy the least-significant bits.
+      DCHECK_EQ(0, base::bits::CountTrailingZeros64(mask));
+
+      // Select Ubfx for And(Shr(x, imm), mask) where the mask is in the least
+      // significant bits.
+      Int64BinopMatcher mleft(m.left().node());
+      if (mleft.right().IsInRange(0, 63)) {
+        Emit(kArm64Ubfx, g.DefineAsRegister(node),
+             g.UseRegister(mleft.left().node()),
+             g.UseImmediate(mleft.right().node()), g.TempImmediate(mask_width));
+        return;
+      }
+      // Other cases fall through to the normal And operation.
+    }
+  }
   VisitLogical<Int64BinopMatcher>(
       this, node, &m, kArm64And, CanCover(node, m.left().node()),
       CanCover(node, m.right().node()), kLogical64Imm);
 }
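For reference, ARM64's UBFX (unsigned bitfield extract) with operands `lsb` and `width` computes exactly the `And(Shr(x, lsb), mask)` value the two visitors above are replacing. A behavioural model (an illustrative sketch, not V8 code; assumes `0 < width` and `lsb + width <= 64`):

  #include <cstdint>

  uint64_t ModelUbfx64(uint64_t src, unsigned lsb, unsigned width) {
    // Shift the field down to bit 0, then keep only its low `width` bits.
    uint64_t shifted = src >> lsb;
    uint64_t field_mask =
        (width == 64) ? ~uint64_t{0} : ((uint64_t{1} << width) - 1);
    return shifted & field_mask;
  }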


 void InstructionSelector::VisitWord32Or(Node* node) {
   Int32BinopMatcher m(node);
   VisitLogical<Int32BinopMatcher>(
       this, node, &m, kArm64Or32, CanCover(node, m.left().node()),
(...skipping 19 matching lines...)

 void InstructionSelector::VisitWord64Xor(Node* node) {
   Int64BinopMatcher m(node);
   VisitLogical<Int64BinopMatcher>(
       this, node, &m, kArm64Eor, CanCover(node, m.left().node()),
       CanCover(node, m.right().node()), kLogical64Imm);
 }


 void InstructionSelector::VisitWord32Shl(Node* node) {
-  VisitRRO(this, kArm64Shl32, node, kShift32Imm);
+  VisitRRO(this, kArm64Lsl32, node, kShift32Imm);
 }


 void InstructionSelector::VisitWord64Shl(Node* node) {
-  VisitRRO(this, kArm64Shl, node, kShift64Imm);
+  VisitRRO(this, kArm64Lsl, node, kShift64Imm);
 }


 void InstructionSelector::VisitWord32Shr(Node* node) {
-  VisitRRO(this, kArm64Shr32, node, kShift32Imm);
+  Arm64OperandGenerator g(this);
+  Int32BinopMatcher m(node);
+  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
+    int32_t lsb = m.right().Value();
+    Int32BinopMatcher mleft(m.left().node());
+    if (mleft.right().HasValue()) {
+      uint32_t mask = (mleft.right().Value() >> lsb) << lsb;
+      uint32_t mask_width = base::bits::CountPopulation32(mask);
+      uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
+      // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is
+      // shifted into the least-significant bits.
+      if ((mask_msb + mask_width + lsb) == 32) {
+        DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(mask));
+        Emit(kArm64Ubfx32, g.DefineAsRegister(node),
+             g.UseRegister(mleft.left().node()), g.TempImmediate(lsb),
+             g.TempImmediate(mask_width));
+        return;
+      }
+    }
+  }
+  VisitRRO(this, kArm64Lsr32, node, kShift32Imm);
 }
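The `Shr` case above is the mirror image of the `And` case: the mask sits `lsb` bits up, and the shift brings the field down to bit 0, so after clearing any bits below `lsb` the selector checks `mask_msb + mask_width + lsb == 32`. A worked instance with hypothetical values:

  #include <cstdint>

  // Shr(And(x, 0xE0), 5): mask = 0xE0, lsb = 5 (values chosen for
  // illustration; not from the patch).
  constexpr uint32_t kMask = (0xE0u >> 5) << 5;  // clears bits below lsb
  static_assert(kMask == 0xE0u, "0xE0 has no bits below bit 5 to clear");
  // popcount(0xE0) = 3, countl_zero(0xE0) = 24, and 24 + 3 + 5 == 32, so
  // this becomes Ubfx32(x, /*lsb=*/5, /*width=*/3): bits [7:5] of x are
  // extracted in one instruction instead of an AND plus a shift.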


 void InstructionSelector::VisitWord64Shr(Node* node) {
-  VisitRRO(this, kArm64Shr, node, kShift64Imm);
+  Arm64OperandGenerator g(this);
+  Int64BinopMatcher m(node);
+  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
+    int64_t lsb = m.right().Value();
+    Int64BinopMatcher mleft(m.left().node());
+    if (mleft.right().HasValue()) {
+      // Select Ubfx for Shr(And(x, mask), imm) where the result of the mask is
+      // shifted into the least-significant bits.
+      uint64_t mask = (mleft.right().Value() >> lsb) << lsb;
+      uint64_t mask_width = base::bits::CountPopulation64(mask);
+      uint64_t mask_msb = base::bits::CountLeadingZeros64(mask);
+      if ((mask_msb + mask_width + lsb) == 64) {
+        DCHECK_EQ(lsb, base::bits::CountTrailingZeros64(mask));
+        Emit(kArm64Ubfx, g.DefineAsRegister(node),
+             g.UseRegister(mleft.left().node()), g.TempImmediate(lsb),
+             g.TempImmediate(mask_width));
+        return;
+      }
+    }
+  }
+  VisitRRO(this, kArm64Lsr, node, kShift64Imm);
 }


 void InstructionSelector::VisitWord32Sar(Node* node) {
-  VisitRRO(this, kArm64Sar32, node, kShift32Imm);
+  VisitRRO(this, kArm64Asr32, node, kShift32Imm);
 }


 void InstructionSelector::VisitWord64Sar(Node* node) {
-  VisitRRO(this, kArm64Sar, node, kShift64Imm);
+  VisitRRO(this, kArm64Asr, node, kShift64Imm);
 }


 void InstructionSelector::VisitWord32Ror(Node* node) {
   VisitRRO(this, kArm64Ror32, node, kShift32Imm);
 }


 void InstructionSelector::VisitWord64Ror(Node* node) {
   VisitRRO(this, kArm64Ror, node, kShift64Imm);
(...skipping 462 matching lines...)
   call_instr->MarkAsCall();
   if (deoptimization != NULL) {
     DCHECK(continuation != NULL);
     call_instr->MarkAsControl();
   }
 }

 } // namespace compiler
 } // namespace internal
 } // namespace v8