Index: runtime/vm/intermediate_language_arm64.cc
diff --git a/runtime/vm/intermediate_language_arm64.cc b/runtime/vm/intermediate_language_arm64.cc
index 642bc9ef5a39ce8050a72cf660dc8e0c504a96be..c4f0fdc2d4fd6efdd248caf00dfd6b94cdc04cec 100644
--- a/runtime/vm/intermediate_language_arm64.cc
+++ b/runtime/vm/intermediate_language_arm64.cc
@@ -5254,8 +5254,68 @@ void CheckSmiInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   Label* deopt = compiler->AddDeoptStub(deopt_id(),
                                         ICData::kDeoptCheckSmi,
                                         licm_hoisted_ ? ICData::kHoisted : 0);
-  __ tsti(value, Immediate(kSmiTagMask));
-  __ b(deopt, NE);
+  __ BranchIfNotSmi(value, deopt);
+}
+
+
+
+LocationSummary* GenericCheckBoundInstr::MakeLocationSummary(Zone* zone,
+                                                             bool opt) const {
+  const intptr_t kNumInputs = 2;
+  const intptr_t kNumTemps = 0;
+  LocationSummary* locs = new(zone) LocationSummary(
+      zone, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath);
+  locs->set_in(kLengthPos, Location::RequiresRegister());
+  locs->set_in(kIndexPos, Location::RequiresRegister());
+  return locs;
+}
+
+
+class RangeErrorSlowPath : public SlowPathCode {
+ public:
+  RangeErrorSlowPath(GenericCheckBoundInstr* instruction, intptr_t try_index)
+      : instruction_(instruction), try_index_(try_index) { }
+
+  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
+    if (Assembler::EmittingComments()) {
+      __ Comment("slow path check bound operation");
+    }
+    __ Bind(entry_label());
+    LocationSummary* locs = instruction_->locs();
+    __ Push(locs->in(0).reg());
+    __ Push(locs->in(1).reg());
+    __ CallRuntime(kRangeErrorRuntimeEntry, 2);
+    compiler->pc_descriptors_list()->AddDescriptor(
+        RawPcDescriptors::kOther,
+        compiler->assembler()->CodeSize(),
+        instruction_->deopt_id(),
+        instruction_->token_pos(),
+        try_index_);
+    compiler->RecordSafepoint(locs, 2);
+    __ brk(0);
+  }
+
+ private:
+  GenericCheckBoundInstr* instruction_;
+  intptr_t try_index_;
+};
+
+
+void GenericCheckBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+  RangeErrorSlowPath* slow_path =
+      new RangeErrorSlowPath(this, compiler->CurrentTryIndex());
+  compiler->AddSlowPathCode(slow_path);
+
+  Location length_loc = locs()->in(kLengthPos);
+  Location index_loc = locs()->in(kIndexPos);
+  Register length = length_loc.reg();
+  Register index = index_loc.reg();
+  const intptr_t index_cid = this->index()->Type()->ToCid();
+  if (index_cid != kSmiCid) {
+    __ BranchIfNotSmi(index, slow_path->entry_label());
+  }
+  __ cmp(index, Operand(length));
+  __ b(slow_path->entry_label(), CS);
 }
@@ -5282,6 +5342,7 @@ void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   Location length_loc = locs()->in(kLengthPos);
   Location index_loc = locs()->in(kIndexPos);
+  const intptr_t index_cid = index()->Type()->ToCid();
   if (length_loc.IsConstant() && index_loc.IsConstant()) {
     // TODO(srdjan): remove this code once failures are fixed.
     if ((Smi::Cast(length_loc.constant()).Value() >
@@ -5307,6 +5368,9 @@ void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   } else if (length_loc.IsConstant()) {
     const Smi& length = Smi::Cast(length_loc.constant());
     const Register index = index_loc.reg();
+    if (index_cid != kSmiCid) {
+      __ BranchIfNotSmi(index, deopt);
+    }
     if (length.Value() == Smi::kMaxValue) {
       __ tst(index, Operand(index));
       __ b(deopt, MI);
@@ -5317,6 +5381,9 @@ void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   } else {
     const Register length = length_loc.reg();
    const Register index = index_loc.reg();
+    if (index_cid != kSmiCid) {
+      __ BranchIfNotSmi(index, deopt);
+    }
     __ CompareRegisters(index, length);
     __ b(deopt, CS);
   }