Chromium Code Reviews| Index: runtime/vm/intermediate_language_mips.cc |
| diff --git a/runtime/vm/intermediate_language_mips.cc b/runtime/vm/intermediate_language_mips.cc |
| index 342e267f6a438de6ea9edc5fa4bbca72e6aec53f..bf3066a1d0849ea665b6c08090894ee12c086f02 100644 |
| --- a/runtime/vm/intermediate_language_mips.cc |
| +++ b/runtime/vm/intermediate_language_mips.cc |
| @@ -4859,6 +4859,70 @@ void CheckClassIdInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| } |
| +LocationSummary* GenericCheckBoundInstr::MakeLocationSummary(Zone* zone, |
| + bool opt) const { |
| + const intptr_t kNumInputs = 2; |
| + const intptr_t kNumTemps = 0; |
| + LocationSummary* locs = new(zone) LocationSummary( |
| + zone, kNumInputs, kNumTemps, LocationSummary::kCallOnSlowPath); |
| + locs->set_in(kLengthPos, Location::RequiresRegister()); |
| + locs->set_in(kIndexPos, Location::RequiresRegister()); |
| + return locs; |
| +} |
| + |
| + |
// Out-of-line code emitted when a GenericCheckBoundInstr's inline check
// fails: pushes the length and index and calls the RangeError runtime
// entry, which throws and does not return.
class RangeErrorSlowPath : public SlowPathCode {
 public:
  RangeErrorSlowPath(GenericCheckBoundInstr* instruction, intptr_t try_index)
      : instruction_(instruction), try_index_(try_index) { }

  virtual void EmitNativeCode(FlowGraphCompiler* compiler) {
    if (Assembler::EmittingComments()) {
      __ Comment("slow path check bound operation");
    }
    __ Bind(entry_label());
    LocationSummary* locs = instruction_->locs();
    // Push the two runtime-call arguments: length (input 0) then index
    // (input 1), matching the input order set up in MakeLocationSummary.
    __ Push(locs->in(0).reg());
    __ Push(locs->in(1).reg());
    compiler->GenerateRuntimeCall(instruction_->token_pos(),
                                  instruction_->deopt_id(),
                                  kRangeErrorRuntimeEntry,
                                  2,
                                  instruction_->locs());
    compiler->RecordSafepoint(locs, /* slow_path_argument_count = */ 2);
    // Record a PC descriptor at the return address of the runtime call so
    // the runtime can map it back to this instruction's deopt id / token
    // position / try index.
    compiler->pc_descriptors_list()->AddDescriptor(
        RawPcDescriptors::kOther,
        compiler->assembler()->CodeSize(),
        instruction_->deopt_id(),
        instruction_->token_pos(),
        try_index_);
    // The runtime call throws; trap if control somehow returns here.
    __ break_(0);
  }

 private:
  GenericCheckBoundInstr* instruction_;  // Not owned.
  intptr_t try_index_;
};
| + |
| + |
// Inline fast path of the generic bounds check. Branches to the RangeError
// slow path when the index is not a Smi (only checked when the index's
// class id is not statically known to be Smi) or when
// index >= length (unsigned compare, which also rejects negative indices
// since their sign bit makes them large unsigned values).
void GenericCheckBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
  RangeErrorSlowPath* slow_path =
      new RangeErrorSlowPath(this, compiler->CurrentTryIndex());
  compiler->AddSlowPathCode(slow_path);

  Location length_loc = locs()->in(kLengthPos);
  Location index_loc = locs()->in(kIndexPos);
  Register length = length_loc.reg();
  Register index = index_loc.reg();
  const intptr_t index_cid = this->index()->Type()->ToCid();
  if (index_cid != kSmiCid) {
    // Open-coded smi-tag test.
    // TODO(review): per the review thread, replace this andi/bne pair with
    // a BranchIfNotSmi assembler macro once one is added.
    __ andi(CMPRES1, index, Immediate(kSmiTagMask));
    __ bne(CMPRES1, ZR, slow_path->entry_label());
  }
  __ BranchUnsignedGreaterEqual(index, length, slow_path->entry_label());
}
| + |
| + |
| LocationSummary* CheckArrayBoundInstr::MakeLocationSummary(Zone* zone, |
| bool opt) const { |
| const intptr_t kNumInputs = 2; |
| @@ -4892,6 +4956,7 @@ void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| return; |
| } |
| + const intptr_t index_cid = index()->Type()->ToCid(); |
| if (index_loc.IsConstant()) { |
| Register length = length_loc.reg(); |
| const Smi& index = Smi::Cast(index_loc.constant()); |
| @@ -4900,6 +4965,10 @@ void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| } else if (length_loc.IsConstant()) { |
| const Smi& length = Smi::Cast(length_loc.constant()); |
| Register index = index_loc.reg(); |
| + if (index_cid != kSmiCid) { |
| + __ andi(CMPRES1, index, Immediate(kSmiTagMask)); |
| + __ bne(CMPRES1, ZR, deopt); |
| + } |
| if (length.Value() == Smi::kMaxValue) { |
| __ BranchSignedLess(index, Immediate(0), deopt); |
| } else { |
| @@ -4909,6 +4978,10 @@ void CheckArrayBoundInstr::EmitNativeCode(FlowGraphCompiler* compiler) { |
| } else { |
| Register length = length_loc.reg(); |
| Register index = index_loc.reg(); |
| + if (index_cid != kSmiCid) { |
| + __ andi(CMPRES1, index, Immediate(kSmiTagMask)); |
| + __ bne(CMPRES1, ZR, deopt); |
| + } |
| __ BranchUnsignedGreaterEqual(index, length, deopt); |
| } |
| } |