Index: src/ia32/lithium-codegen-ia32.cc
diff --git a/src/ia32/lithium-codegen-ia32.cc b/src/ia32/lithium-codegen-ia32.cc
index 71ad8387a0cb897c7cfa96a3b0f70892c6f1f053..a3e469adcad6989d6ac5d09e78251947f8066be3 100644
--- a/src/ia32/lithium-codegen-ia32.cc
+++ b/src/ia32/lithium-codegen-ia32.cc
@@ -2055,6 +2055,17 @@ void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
 }
 
 
+template <class InstrType>
+void LCodeGen::EmitTrueBranch(InstrType instr, Condition cc) {
+  int true_block = instr->TrueDestination(chunk_);
+  if (cc == no_condition) {
+    __ jmp(chunk_->GetAssemblyLabel(true_block));
+  } else {
+    __ j(cc, chunk_->GetAssemblyLabel(true_block));
+  }
+}
+
+
 template<class InstrType>
 void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
   int false_block = instr->FalseDestination(chunk_);
@@ -2614,120 +2625,41 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
 
 
 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
-  // Object and function are in fixed registers defined by the stub.
   DCHECK(ToRegister(instr->context()).is(esi));
-  InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
+  DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
+  DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
+  DCHECK(ToRegister(instr->result()).is(eax));
+  InstanceOfStub stub(isolate());
   CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-
-  Label true_value, done;
-  __ test(eax, Operand(eax));
-  __ j(zero, &true_value, Label::kNear);
-  __ mov(ToRegister(instr->result()), factory()->false_value());
-  __ jmp(&done, Label::kNear);
-  __ bind(&true_value);
-  __ mov(ToRegister(instr->result()), factory()->true_value());
-  __ bind(&done);
-}
-
-
-void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
-  class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
-   public:
-    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
-                                  LInstanceOfKnownGlobal* instr)
-        : LDeferredCode(codegen), instr_(instr) { }
-    void Generate() override {
-      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
-    }
-    LInstruction* instr() override { return instr_; }
-    Label* map_check() { return &map_check_; }
-   private:
-    LInstanceOfKnownGlobal* instr_;
-    Label map_check_;
-  };
-
-  DeferredInstanceOfKnownGlobal* deferred;
-  deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
-
-  Label done, false_result;
-  Register object = ToRegister(instr->value());
-  Register temp = ToRegister(instr->temp());
-
-  // A Smi is not an instance of anything.
-  __ JumpIfSmi(object, &false_result, Label::kNear);
-
-  // This is the inlined call site instanceof cache. The two occurences of the
-  // hole value will be patched to the last map/result pair generated by the
-  // instanceof stub.
-  Label cache_miss;
-  Register map = ToRegister(instr->temp());
-  __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
-  __ bind(deferred->map_check());  // Label for calculating code patching.
-  Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
-  __ cmp(map, Operand::ForCell(cache_cell));  // Patched to cached map.
-  __ j(not_equal, &cache_miss, Label::kNear);
-  __ mov(eax, factory()->the_hole_value());  // Patched to either true or false.
-  __ jmp(&done, Label::kNear);
-
-  // The inlined call site cache did not match. Check for null and string
-  // before calling the deferred code.
-  __ bind(&cache_miss);
-  // Null is not an instance of anything.
-  __ cmp(object, factory()->null_value());
-  __ j(equal, &false_result, Label::kNear);
-
-  // String values are not instances of anything.
-  Condition is_string = masm_->IsObjectStringType(object, temp, temp);
-  __ j(is_string, &false_result, Label::kNear);
-
-  // Go to the deferred code.
-  __ jmp(deferred->entry());
-
-  __ bind(&false_result);
-  __ mov(ToRegister(instr->result()), factory()->false_value());
-
-  // Here result has either true or false. Deferred code also produces true or
-  // false object.
-  __ bind(deferred->exit());
-  __ bind(&done);
 }
 
 
-void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
-                                               Label* map_check) {
-  PushSafepointRegistersScope scope(this);
+void LCodeGen::DoHasInPrototypeChainAndBranch(
+    LHasInPrototypeChainAndBranch* instr) {
+  Register const object = ToRegister(instr->object());
+  Register const object_map = ToRegister(instr->scratch());
+  Register const object_prototype = object_map;
+  Register const prototype = ToRegister(instr->prototype());
 
-  InstanceofStub::Flags flags = InstanceofStub::kNoFlags;
-  flags = static_cast<InstanceofStub::Flags>(
-      flags | InstanceofStub::kArgsInRegisters);
-  flags = static_cast<InstanceofStub::Flags>(
-      flags | InstanceofStub::kCallSiteInlineCheck);
-  flags = static_cast<InstanceofStub::Flags>(
-      flags | InstanceofStub::kReturnTrueFalseObject);
-  InstanceofStub stub(isolate(), flags);
-
-  // Get the temp register reserved by the instruction. This needs to be a
-  // register which is pushed last by PushSafepointRegisters as top of the
-  // stack is used to pass the offset to the location of the map check to
-  // the stub.
-  Register temp = ToRegister(instr->temp());
-  DCHECK(MacroAssembler::SafepointRegisterStackIndex(temp) == 0);
-  __ LoadHeapObject(InstanceofStub::right(), instr->function());
-  static const int kAdditionalDelta = 13;
-  int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
-  __ mov(temp, Immediate(delta));
-  __ StoreToSafepointRegisterSlot(temp, temp);
-  CallCodeGeneric(stub.GetCode(),
-                  RelocInfo::CODE_TARGET,
-                  instr,
-                  RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS);
-  // Get the deoptimization index of the LLazyBailout-environment that
-  // corresponds to this instruction.
-  LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
-  safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
+  // The {object} must be a spec object. It's sufficient to know that {object}
+  // is not a smi, since all other non-spec objects have {null} prototypes and
+  // will be ruled out below.
+  if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
+    __ test(object, Immediate(kSmiTagMask));
+    EmitFalseBranch(instr, zero);
+  }
 
-  // Put the result value into the eax slot and restore all registers.
-  __ StoreToSafepointRegisterSlot(eax, eax);
+  // Loop through the {object}s prototype chain looking for the {prototype}.
+  __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset));
+  Label loop;
+  __ bind(&loop);
+  __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
+  __ cmp(object_prototype, prototype);
+  EmitTrueBranch(instr, equal);
+  __ cmp(object_prototype, factory()->null_value());
+  EmitFalseBranch(instr, equal);
+  __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
+  __ jmp(&loop);
 }
 
 
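For reference, a minimal C++ sketch (not part of the patch) of the check that the new DoHasInPrototypeChainAndBranch emits: a loop that follows map->prototype links from {object} until it either finds {prototype} (true branch) or reaches null (false branch). The struct layout and the HasInPrototypeChain/null_value names below are illustrative stand-ins, not V8's actual HeapObject/Map types.

// Illustrative sketch only; assumes a simplified object model.
struct Map;

struct HeapObject {
  Map* map;  // stands in for the HeapObject::kMapOffset load
};

struct Map {
  HeapObject* prototype;  // stands in for the Map::kPrototypeOffset load
};

// Walks the prototype chain of {object}; mirrors the emitted loop above.
bool HasInPrototypeChain(HeapObject* object, HeapObject* prototype,
                         HeapObject* null_value) {
  Map* object_map = object->map;
  for (;;) {
    HeapObject* object_prototype = object_map->prototype;
    if (object_prototype == prototype) return true;    // EmitTrueBranch(equal)
    if (object_prototype == null_value) return false;  // EmitFalseBranch(equal)
    object_map = object_prototype->map;  // follow the chain and loop again
  }
}

The new EmitTrueBranch helper mirrors the existing EmitFalseBranch, so the loop can jump straight to the instruction's true/false destinations instead of materializing true/false objects in eax as the removed DoInstanceOfKnownGlobal path did.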