Index: src/arm/lithium-codegen-arm.cc |
diff --git a/src/arm/lithium-codegen-arm.cc b/src/arm/lithium-codegen-arm.cc |
index 606721f2da1230b5d5ca3b90240f3596b15ae68d..7625ee57666643c757eb9c75cd0ae221191ea148 100644 |
--- a/src/arm/lithium-codegen-arm.cc |
+++ b/src/arm/lithium-codegen-arm.cc |
@@ -2150,7 +2150,14 @@ void LCodeGen::EmitBranch(InstrType instr, Condition condition) { |
} |
-template<class InstrType> |
+template <class InstrType> |
+void LCodeGen::EmitTrueBranch(InstrType instr, Condition condition) { |
+ int true_block = instr->TrueDestination(chunk_); |
+ __ b(condition, chunk_->GetAssemblyLabel(true_block)); |
+} |
+ |
+ |
+template <class InstrType> |
void LCodeGen::EmitFalseBranch(InstrType instr, Condition condition) { |
int false_block = instr->FalseDestination(chunk_); |
__ b(condition, chunk_->GetAssemblyLabel(false_block)); |
@@ -2725,160 +2732,40 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { |
void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
DCHECK(ToRegister(instr->context()).is(cp)); |
- DCHECK(ToRegister(instr->left()).is(r0)); // Object is in r0. |
- DCHECK(ToRegister(instr->right()).is(r1)); // Function is in r1. |
- |
- InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); |
+ DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister())); |
+ DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister())); |
+ DCHECK(ToRegister(instr->result()).is(r0)); |
+ InstanceOfStub stub(isolate()); |
CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
- |
- __ cmp(r0, Operand::Zero()); |
- __ mov(r0, Operand(factory()->false_value()), LeaveCC, ne); |
- __ mov(r0, Operand(factory()->true_value()), LeaveCC, eq); |
} |
-void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { |
- class DeferredInstanceOfKnownGlobal final : public LDeferredCode { |
- public: |
- DeferredInstanceOfKnownGlobal(LCodeGen* codegen, |
- LInstanceOfKnownGlobal* instr) |
- : LDeferredCode(codegen), instr_(instr) { } |
- void Generate() override { |
- codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_, |
- &load_bool_); |
- } |
- LInstruction* instr() override { return instr_; } |
- Label* map_check() { return &map_check_; } |
- Label* load_bool() { return &load_bool_; } |
- |
- private: |
- LInstanceOfKnownGlobal* instr_; |
- Label map_check_; |
- Label load_bool_; |
- }; |
- |
- DeferredInstanceOfKnownGlobal* deferred; |
- deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); |
- |
- Label done, false_result; |
- Register object = ToRegister(instr->value()); |
- Register temp = ToRegister(instr->temp()); |
- Register result = ToRegister(instr->result()); |
+void LCodeGen::DoHasInPrototypeChainAndBranch( |
+ LHasInPrototypeChainAndBranch* instr) { |
+ Register const object = ToRegister(instr->object()); |
+ Register const object_map = scratch0(); |
+ Register const object_prototype = object_map; |
+ Register const prototype = ToRegister(instr->prototype()); |
- // A Smi is not instance of anything. |
- __ JumpIfSmi(object, &false_result); |
- |
- // This is the inlined call site instanceof cache. The two occurences of the |
- // hole value will be patched to the last map/result pair generated by the |
- // instanceof stub. |
- Label cache_miss; |
- Register map = temp; |
- __ ldr(map, FieldMemOperand(object, HeapObject::kMapOffset)); |
- { |
- // Block constant pool emission to ensure the positions of instructions are |
- // as expected by the patcher. See InstanceofStub::Generate(). |
- Assembler::BlockConstPoolScope block_const_pool(masm()); |
- __ bind(deferred->map_check()); // Label for calculating code patching. |
- // We use Factory::the_hole_value() on purpose instead of loading from the |
- // root array to force relocation to be able to later patch with |
- // the cached map. |
- Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); |
- __ mov(ip, Operand(cell)); |
- __ ldr(ip, FieldMemOperand(ip, Cell::kValueOffset)); |
- __ cmp(map, Operand(ip)); |
- __ b(ne, &cache_miss); |
- __ bind(deferred->load_bool()); // Label for calculating code patching. |
- // We use Factory::the_hole_value() on purpose instead of loading from the |
- // root array to force relocation to be able to later patch |
- // with true or false. |
- __ mov(result, Operand(factory()->the_hole_value())); |
+ // The {object} must be a spec object. It's sufficient to know that {object} |
+ // is not a smi, since all other non-spec objects have {null} prototypes and |
+ // will be ruled out below. |
+ if (instr->hydrogen()->ObjectNeedsSmiCheck()) { |
+ __ SmiTst(object); |
+ EmitFalseBranch(instr, eq); |
} |
- __ b(&done); |
- |
- // The inlined call site cache did not match. Check null and string before |
- // calling the deferred code. |
- __ bind(&cache_miss); |
- // Null is not instance of anything. |
- __ LoadRoot(ip, Heap::kNullValueRootIndex); |
- __ cmp(object, Operand(ip)); |
- __ b(eq, &false_result); |
- |
- // String values is not instance of anything. |
- Condition is_string = masm_->IsObjectStringType(object, temp); |
- __ b(is_string, &false_result); |
- |
- // Go to the deferred code. |
- __ b(deferred->entry()); |
- |
- __ bind(&false_result); |
- __ LoadRoot(result, Heap::kFalseValueRootIndex); |
- |
- // Here result has either true or false. Deferred code also produces true or |
- // false object. |
- __ bind(deferred->exit()); |
- __ bind(&done); |
-} |
- |
- |
-void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, |
- Label* map_check, |
- Label* bool_load) { |
- InstanceofStub::Flags flags = InstanceofStub::kNoFlags; |
- flags = static_cast<InstanceofStub::Flags>( |
- flags | InstanceofStub::kArgsInRegisters); |
- flags = static_cast<InstanceofStub::Flags>( |
- flags | InstanceofStub::kCallSiteInlineCheck); |
- flags = static_cast<InstanceofStub::Flags>( |
- flags | InstanceofStub::kReturnTrueFalseObject); |
- InstanceofStub stub(isolate(), flags); |
- PushSafepointRegistersScope scope(this); |
- LoadContextFromDeferred(instr->context()); |
- |
- __ Move(InstanceofStub::right(), instr->function()); |
- |
- int call_size = CallCodeSize(stub.GetCode(), RelocInfo::CODE_TARGET); |
- int additional_delta = (call_size / Assembler::kInstrSize) + 4; |
- { |
- // Make sure that code size is predicable, since we use specific constants |
- // offsets in the code to find embedded values.. |
- PredictableCodeSizeScope predictable( |
- masm_, additional_delta * Assembler::kInstrSize); |
- // The labels must be already bound since the code has predictabel size up |
- // to the call instruction. |
- DCHECK(map_check->is_bound()); |
- DCHECK(bool_load->is_bound()); |
- // Make sure we don't emit any additional entries in the constant pool |
- // before the call to ensure that the CallCodeSize() calculated the |
- // correct number of instructions for the constant pool load. |
- { |
- ConstantPoolUnavailableScope constant_pool_unavailable(masm_); |
- int map_check_delta = |
- masm_->InstructionsGeneratedSince(map_check) + additional_delta; |
- int bool_load_delta = |
- masm_->InstructionsGeneratedSince(bool_load) + additional_delta; |
- Label before_push_delta; |
- __ bind(&before_push_delta); |
- __ BlockConstPoolFor(additional_delta); |
- // r5 is used to communicate the offset to the location of the map check. |
- __ mov(r5, Operand(map_check_delta * kPointerSize)); |
- // r6 is used to communicate the offset to the location of the bool load. |
- __ mov(r6, Operand(bool_load_delta * kPointerSize)); |
- // The mov above can generate one or two instructions. The delta was |
- // computed for two instructions, so we need to pad here in case of one |
- // instruction. |
- while (masm_->InstructionsGeneratedSince(&before_push_delta) != 4) { |
- __ nop(); |
- } |
- } |
- CallCodeGeneric(stub.GetCode(), RelocInfo::CODE_TARGET, instr, |
- RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); |
- } |
- LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); |
- safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); |
- // Put the result value (r0) into the result register slot and |
- // restore all registers. |
- __ StoreToSafepointRegisterSlot(r0, ToRegister(instr->result())); |
+ // Loop through the {object}'s prototype chain looking for the {prototype}. |
+ __ ldr(object_map, FieldMemOperand(object, HeapObject::kMapOffset)); |
+ Label loop; |
+ __ bind(&loop); |
+ __ ldr(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset)); |
+ __ cmp(object_prototype, prototype); |
+ EmitTrueBranch(instr, eq); |
+ __ CompareRoot(object_prototype, Heap::kNullValueRootIndex); |
+ EmitFalseBranch(instr, eq); |
+ __ ldr(object_map, FieldMemOperand(object_prototype, HeapObject::kMapOffset)); |
+ __ b(&loop); |
} |