Index: src/compiler/arm64/code-generator-arm64.cc
diff --git a/src/compiler/arm64/code-generator-arm64.cc b/src/compiler/arm64/code-generator-arm64.cc
index 1aa20207e73abfa90c5b9082e72242eded0c3d99..8281b7b13cca6de6494156f7ae850044800a48d4 100644
--- a/src/compiler/arm64/code-generator-arm64.cc
+++ b/src/compiler/arm64/code-generator-arm64.cc
@@ -357,6 +357,9 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
     case kArchJmp:
       AssembleArchJump(i.InputRpo(0));
       break;
+    case kArchSwitch:
+      AssembleArchSwitch(instr);
+      break;
     case kArchNop:
       // don't emit code for nops.
       break;
@@ -838,6 +841,25 @@ void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
 }
 
 
+void CodeGenerator::AssembleArchSwitch(Instruction* instr) {
+  Arm64OperandConverter i(this, instr);
+  UseScratchRegisterScope scope(masm());
+  Register reg = i.InputRegister(0);
+  Register tmp = scope.AcquireX();
+  Label table;
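+  // Each entry in the table below is a single 4-byte B instruction, so the
+  // entry for a given switch value lives at &table + value * 4 (LSL by 2).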
+  __ Adr(tmp, &table);
+  __ Add(tmp, tmp, Operand(reg, LSL, 2));
+  __ Br(tmp);
+  __ Bind(&table);
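+  // Input 0 is the switch value; the remaining inputs are the jump targets.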
+  for (size_t index = 1; index < instr->InputCount(); ++index) {
+    __ B(GetLabel(i.InputRpo(index)));
+  }
+}
+
+
 // Assemble boolean materializations after this instruction.
 void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                         FlagsCondition condition) {
@@ -1084,6 +1106,13 @@ void CodeGenerator::AssembleSwap(InstructionOperand* source,
 }
 
 
+void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
+  // On 64-bit ARM we emit the jump tables inline.
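+  // AssembleArchSwitch above emits the table at its point of use instead.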
+  UNREACHABLE();
+}
+
+
 void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); }