Index: src/mips64/lithium-codegen-mips64.cc
diff --git a/src/mips64/lithium-codegen-mips64.cc b/src/mips64/lithium-codegen-mips64.cc
index 77813d50cb2bdd53b84aa711c8b13c9770a5b68a..50ba1707ad0515fde9ed254750672d7374070ec4 100644
--- a/src/mips64/lithium-codegen-mips64.cc
+++ b/src/mips64/lithium-codegen-mips64.cc
@@ -2158,11 +2158,17 @@ void LCodeGen::EmitBranchF(InstrType instr,
 }
 
 
-template<class InstrType>
-void LCodeGen::EmitFalseBranch(InstrType instr,
-                               Condition condition,
-                               Register src1,
-                               const Operand& src2) {
+template <class InstrType>
+void LCodeGen::EmitTrueBranch(InstrType instr, Condition condition,
+                              Register src1, const Operand& src2) {
+  int true_block = instr->TrueDestination(chunk_);
+  __ Branch(chunk_->GetAssemblyLabel(true_block), condition, src1, src2);
+}
+
+
+template <class InstrType>
+void LCodeGen::EmitFalseBranch(InstrType instr, Condition condition,
+                               Register src1, const Operand& src2) {
   int false_block = instr->FalseDestination(chunk_);
   __ Branch(chunk_->GetAssemblyLabel(false_block), condition, src1, src2);
 }
@@ -2756,141 +2762,41 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   DCHECK(ToRegister(instr->context()).is(cp));
   Label true_label, done;
-  DCHECK(ToRegister(instr->left()).is(a0));  // Object is in a0.
-  DCHECK(ToRegister(instr->right()).is(a1));  // Function is in a1.
-  Register result = ToRegister(instr->result());
-  DCHECK(result.is(v0));
+  DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
+  DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
+  DCHECK(ToRegister(instr->result()).is(v0));
 
-  InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
+  InstanceOfStub stub(isolate());
   CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-
-  __ Branch(&true_label, eq, result, Operand(zero_reg));
-  __ li(result, Operand(factory()->false_value()));
-  __ Branch(&done);
-  __ bind(&true_label);
-  __ li(result, Operand(factory()->true_value()));
-  __ bind(&done);
-}
-
-
-void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
-  class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
-   public:
-    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
-                                  LInstanceOfKnownGlobal* instr)
-        : LDeferredCode(codegen), instr_(instr) { }
-    void Generate() override {
-      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
-    }
-    LInstruction* instr() override { return instr_; }
-    Label* map_check() { return &map_check_; }
-
-   private:
-    LInstanceOfKnownGlobal* instr_;
-    Label map_check_;
-  };
-
-  DeferredInstanceOfKnownGlobal* deferred;
-  deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
-
-  Label done, false_result;
-  Register object = ToRegister(instr->value());
-  Register temp = ToRegister(instr->temp());
-  Register result = ToRegister(instr->result());
-
-  DCHECK(object.is(a0));
-  DCHECK(result.is(v0));
-
-  // A Smi is not instance of anything.
-  __ JumpIfSmi(object, &false_result);
-
-  // This is the inlined call site instanceof cache. The two occurences of the
-  // hole value will be patched to the last map/result pair generated by the
-  // instanceof stub.
-  Label cache_miss;
-  Register map = temp;
-  __ ld(map, FieldMemOperand(object, HeapObject::kMapOffset));
-
-  Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
-  __ bind(deferred->map_check());  // Label for calculating code patching.
-  // We use Factory::the_hole_value() on purpose instead of loading from the
-  // root array to force relocation to be able to later patch with
-  // the cached map.
-  Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value());
-  __ li(at, Operand(cell));
-  __ ld(at, FieldMemOperand(at, Cell::kValueOffset));
-  __ BranchShort(&cache_miss, ne, map, Operand(at));
-  // We use Factory::the_hole_value() on purpose instead of loading from the
-  // root array to force relocation to be able to later patch
-  // with true or false. The distance from map check has to be constant.
-  __ li(result, Operand(factory()->the_hole_value()));
-  __ Branch(&done);
-
-  // The inlined call site cache did not match. Check null and string before
-  // calling the deferred code.
-  __ bind(&cache_miss);
-  // Null is not instance of anything.
-  __ LoadRoot(temp, Heap::kNullValueRootIndex);
-  __ Branch(&false_result, eq, object, Operand(temp));
-
-  // String values is not instance of anything.
-  Condition cc = __ IsObjectStringType(object, temp, temp);
-  __ Branch(&false_result, cc, temp, Operand(zero_reg));
-
-  // Go to the deferred code.
-  __ Branch(deferred->entry());
-
-  __ bind(&false_result);
-  __ LoadRoot(result, Heap::kFalseValueRootIndex);
-
-  // Here result has either true or false. Deferred code also produces true or
-  // false object.
-  __ bind(deferred->exit());
-  __ bind(&done);
 }
 
 
-void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
-                                               Label* map_check) {
-  Register result = ToRegister(instr->result());
-  DCHECK(result.is(v0));
+void LCodeGen::DoHasInPrototypeChainAndBranch(
+    LHasInPrototypeChainAndBranch* instr) {
+  Register const object = ToRegister(instr->object());
+  Register const object_map = scratch0();
+  Register const object_prototype = object_map;
+  Register const prototype = ToRegister(instr->prototype());
 
-  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
-  flags = static_cast<InstanceofStub::Flags>(
-      flags | InstanceofStub::kArgsInRegisters);
-  flags = static_cast<InstanceofStub::Flags>(
-      flags | InstanceofStub::kCallSiteInlineCheck);
-  flags = static_cast<InstanceofStub::Flags>(
-      flags | InstanceofStub::kReturnTrueFalseObject);
-  InstanceofStub stub(isolate(), flags);
-
-  PushSafepointRegistersScope scope(this);
-  LoadContextFromDeferred(instr->context());
+  // The {object} must be a spec object. It's sufficient to know that {object}
+  // is not a smi, since all other non-spec objects have {null} prototypes and
+  // will be ruled out below.
+  if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
+    __ SmiTst(object, at);
+    EmitFalseBranch(instr, eq, at, Operand(zero_reg));
+  }
 
-  // Get the temp register reserved by the instruction. This needs to be a4 as
-  // its slot of the pushing of safepoint registers is used to communicate the
-  // offset to the location of the map check.
-  Register temp = ToRegister(instr->temp());
-  DCHECK(temp.is(a4));
-  __ li(InstanceofStub::right(), instr->function());
-  static const int kAdditionalDelta = 13;
-  int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta;
-  Label before_push_delta;
-  __ bind(&before_push_delta);
-  {
-    Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_);
-    __ li(temp, Operand(delta * kIntSize), CONSTANT_SIZE);
-    __ StoreToSafepointRegisterSlot(temp, temp);
-  }
-  CallCodeGeneric(stub.GetCode(),
-                  RelocInfo::CODE_TARGET,
-                  instr,
-                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
-  LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
-  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
-  // Put the result value into the result register slot and
-  // restore all registers.
-  __ StoreToSafepointRegisterSlot(result, result);
+  // Loop through the {object}s prototype chain looking for the {prototype}.
+  __ ld(object_map, FieldMemOperand(object, HeapObject::kMapOffset));
+  Label loop;
+  __ bind(&loop);
+  __ ld(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset));
+  EmitTrueBranch(instr, eq, object_prototype, Operand(prototype));
+  __ LoadRoot(at, Heap::kNullValueRootIndex);
+  EmitFalseBranch(instr, eq, object_prototype, Operand(at));
+  __ Branch(&loop, USE_DELAY_SLOT);
+  __ ld(object_map, FieldMemOperand(object_prototype,
+                                    HeapObject::kMapOffset));  // In delay slot.
 }
 
 
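
The emitted loop above walks object -> map -> prototype until it either reaches {prototype} (true branch) or V8's null value (false branch). As a rough illustration only, the following is a minimal C++ sketch of that walk over an assumed HeapValue stand-in type; it folds the map indirection away, skips the smi check, and none of these names come from V8 itself.

  #include <cstdio>

  // Assumed stand-in for a heap object: only the prototype link matters here.
  // nullptr plays the role of V8's null value that terminates the chain.
  struct HeapValue {
    const HeapValue* prototype;
  };

  // Mirrors the emitted loop: report true when {prototype} is found,
  // report false once the chain ends at null.
  bool HasInPrototypeChain(const HeapValue& object, const HeapValue& prototype) {
    const HeapValue* current = object.prototype;
    while (current != nullptr) {
      if (current == &prototype) return true;  // EmitTrueBranch case.
      current = current->prototype;            // Advance; done in the delay slot above.
    }
    return false;                              // EmitFalseBranch on null.
  }

  int main() {
    HeapValue proto{nullptr};
    HeapValue parent{&proto};
    HeapValue object{&parent};
    std::printf("%d %d\n", HasInPrototypeChain(object, proto),   // prints 1
                           HasInPrototypeChain(proto, object));  // prints 0
  }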