Index: src/x64/lithium-codegen-x64.cc
diff --git a/src/x64/lithium-codegen-x64.cc b/src/x64/lithium-codegen-x64.cc
index b936edc7ded2e5ef8c1ff7260f48cd41594e271c..fd1956fa02fd7ae24d0d6552d40ecc16328fc725 100644
--- a/src/x64/lithium-codegen-x64.cc
+++ b/src/x64/lithium-codegen-x64.cc
@@ -2093,7 +2093,14 @@ void LCodeGen::EmitBranch(InstrType instr, Condition cc) {
 }
 
 
-template<class InstrType>
+template <class InstrType>
+void LCodeGen::EmitTrueBranch(InstrType instr, Condition cc) {
+  int true_block = instr->TrueDestination(chunk_);
+  __ j(cc, chunk_->GetAssemblyLabel(true_block));
+}
+
+
+template <class InstrType>
 void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) {
   int false_block = instr->FalseDestination(chunk_);
   __ j(cc, chunk_->GetAssemblyLabel(false_block));
@@ -2671,128 +2678,40 @@ void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) {
 
 void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
   DCHECK(ToRegister(instr->context()).is(rsi));
-  InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
-  __ Push(ToRegister(instr->left()));
-  __ Push(ToRegister(instr->right()));
+  DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister()));
+  DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister()));
+  DCHECK(ToRegister(instr->result()).is(rax));
+  InstanceOfStub stub(isolate());
   CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr);
-  Label true_value, done;
-  __ testp(rax, rax);
-  __ j(zero, &true_value, Label::kNear);
-  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
-  __ jmp(&done, Label::kNear);
-  __ bind(&true_value);
-  __ LoadRoot(ToRegister(instr->result()), Heap::kTrueValueRootIndex);
-  __ bind(&done);
 }
 
 
-void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
-  class DeferredInstanceOfKnownGlobal final : public LDeferredCode {
-   public:
-    DeferredInstanceOfKnownGlobal(LCodeGen* codegen,
-                                  LInstanceOfKnownGlobal* instr)
-        : LDeferredCode(codegen), instr_(instr) { }
-    void Generate() override {
-      codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_);
-    }
-    LInstruction* instr() override { return instr_; }
-    Label* map_check() { return &map_check_; }
-   private:
-    LInstanceOfKnownGlobal* instr_;
-    Label map_check_;
-  };
-
-  DCHECK(ToRegister(instr->context()).is(rsi));
-  DeferredInstanceOfKnownGlobal* deferred;
-  deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr);
-
-  Label done, false_result;
-  Register object = ToRegister(instr->value());
-
-  // A Smi is not an instance of anything.
-  __ JumpIfSmi(object, &false_result, Label::kNear);
-
-  // This is the inlined call site instanceof cache. The two occurences of the
-  // hole value will be patched to the last map/result pair generated by the
-  // instanceof stub.
-  Label cache_miss;
-  // Use a temp register to avoid memory operands with variable lengths.
-  Register map = ToRegister(instr->temp());
-  __ movp(map, FieldOperand(object, HeapObject::kMapOffset));
-  __ bind(deferred->map_check());  // Label for calculating code patching.
-  Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value());
-  __ Move(kScratchRegister, cache_cell, RelocInfo::CELL);
-  __ cmpp(map, Operand(kScratchRegister, 0));
-  __ j(not_equal, &cache_miss, Label::kNear);
-  // Patched to load either true or false.
-  __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
-#ifdef DEBUG
-  // Check that the code size between patch label and patch sites is invariant.
-  Label end_of_patched_code;
-  __ bind(&end_of_patched_code);
-  DCHECK(true);
-#endif
-  __ jmp(&done, Label::kNear);
-
-  // The inlined call site cache did not match. Check for null and string
-  // before calling the deferred code.
-  __ bind(&cache_miss);  // Null is not an instance of anything.
-  __ CompareRoot(object, Heap::kNullValueRootIndex);
-  __ j(equal, &false_result, Label::kNear);
-
-  // String values are not instances of anything.
-  __ JumpIfNotString(object, kScratchRegister, deferred->entry());
-
-  __ bind(&false_result);
-  __ LoadRoot(ToRegister(instr->result()), Heap::kFalseValueRootIndex);
-
-  __ bind(deferred->exit());
-  __ bind(&done);
-}
-
+void LCodeGen::DoHasInPrototypeChainAndBranch(
+    LHasInPrototypeChainAndBranch* instr) {
+  Register const object = ToRegister(instr->object());
+  Register const object_map = kScratchRegister;
+  Register const object_prototype = object_map;
+  Register const prototype = ToRegister(instr->prototype());
 
-void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
-                                               Label* map_check) {
-  {
-    PushSafepointRegistersScope scope(this);
-    InstanceofStub::Flags flags = static_cast<InstanceofStub::Flags>(
-        InstanceofStub::kNoFlags | InstanceofStub::kCallSiteInlineCheck);
-    InstanceofStub stub(isolate(), flags);
-
-    __ Push(ToRegister(instr->value()));
-    __ Push(instr->function());
-
-    static const int kAdditionalDelta = kPointerSize == kInt64Size ? 10 : 16;
-    int delta =
-        masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta;
-    DCHECK(delta >= 0);
-    __ PushImm32(delta);
-
-    // We are pushing three values on the stack but recording a
-    // safepoint with two arguments because stub is going to
-    // remove the third argument from the stack before jumping
-    // to instanceof builtin on the slow path.
-    CallCodeGeneric(stub.GetCode(),
-                    RelocInfo::CODE_TARGET,
-                    instr,
-                    RECORD_SAFEPOINT_WITH_REGISTERS,
-                    2);
-    DCHECK(delta == masm_->SizeOfCodeGeneratedSince(map_check));
-    LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment();
-    safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index());
-    // Move result to a register that survives the end of the
-    // PushSafepointRegisterScope.
-    __ movp(kScratchRegister, rax);
+  // The {object} must be a spec object. It's sufficient to know that {object}
+  // is not a smi, since all other non-spec objects have {null} prototypes and
+  // will be ruled out below.
+  if (instr->hydrogen()->ObjectNeedsSmiCheck()) {
+    Condition is_smi = __ CheckSmi(object);
+    EmitFalseBranch(instr, is_smi);
   }
-  __ testp(kScratchRegister, kScratchRegister);
-  Label load_false;
-  Label done;
-  __ j(not_zero, &load_false, Label::kNear);
-  __ LoadRoot(rax, Heap::kTrueValueRootIndex);
-  __ jmp(&done, Label::kNear);
-  __ bind(&load_false);
-  __ LoadRoot(rax, Heap::kFalseValueRootIndex);
-  __ bind(&done);
+
+  // Loop through the {object}s prototype chain looking for the {prototype}.
+  __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
+  Label loop;
+  __ bind(&loop);
+  __ movp(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
+  __ cmpp(object_prototype, prototype);
+  EmitTrueBranch(instr, equal);
+  __ CompareRoot(object_prototype, Heap::kNullValueRootIndex);
+  EmitFalseBranch(instr, equal);
+  __ movp(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
+  __ jmp(&loop);
 }
 
 
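Note (sketch, not part of the CL): with the patched inline instanceof cache gone, DoHasInPrototypeChainAndBranch simply emits a loop that walks the prototype chain until it finds {prototype} or reaches the null sentinel. The self-contained C++ model below mirrors that branch structure; JSObjectModel and its fields are invented stand-ins for tagged heap objects (the generated code instead loads HeapObject::kMapOffset and Map::kPrototypeOffset), so read it as an illustration rather than V8 API.

// Sketch only: models the branch semantics of the emitted loop above.
struct JSObjectModel {
  bool is_smi;               // smis are ruled out up front
  JSObjectModel* prototype;  // nullptr marks the end of the chain
};

// True iff `prototype` occurs somewhere on the prototype chain of `object`.
bool HasInPrototypeChain(const JSObjectModel* object,
                         const JSObjectModel* prototype) {
  if (object->is_smi) return false;   // CheckSmi -> EmitFalseBranch
  const JSObjectModel* p = object->prototype;
  for (;;) {
    if (p == prototype) return true;  // cmpp equal -> EmitTrueBranch
    if (p == nullptr) return false;   // null reached -> EmitFalseBranch
    p = p->prototype;                 // movp: follow the next chain link
  }
}

As in the emitted code, the object itself is never compared against {prototype}: the walk starts at its immediate prototype, and a smi fails straight away since it cannot be a spec object. The model always performs the smi check, whereas the generated code emits it only when ObjectNeedsSmiCheck() holds.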