| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #if V8_TARGET_ARCH_IA32 | 5 #if V8_TARGET_ARCH_IA32 |
| 6 | 6 |
| 7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
| 8 #include "src/code-factory.h" | 8 #include "src/code-factory.h" |
| 9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
| 10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
| (...skipping 2037 matching lines...) |
| 2048 __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block)); | 2048 __ j(NegateCondition(cc), chunk_->GetAssemblyLabel(right_block)); |
| 2049 } else if (right_block == next_block) { | 2049 } else if (right_block == next_block) { |
| 2050 __ j(cc, chunk_->GetAssemblyLabel(left_block)); | 2050 __ j(cc, chunk_->GetAssemblyLabel(left_block)); |
| 2051 } else { | 2051 } else { |
| 2052 __ j(cc, chunk_->GetAssemblyLabel(left_block)); | 2052 __ j(cc, chunk_->GetAssemblyLabel(left_block)); |
| 2053 __ jmp(chunk_->GetAssemblyLabel(right_block)); | 2053 __ jmp(chunk_->GetAssemblyLabel(right_block)); |
| 2054 } | 2054 } |
| 2055 } | 2055 } |
| 2056 | 2056 |
| 2057 | 2057 |
| | 2058 template <class InstrType> |
| | 2059 void LCodeGen::EmitTrueBranch(InstrType instr, Condition cc) { |
| | 2060 int true_block = instr->TrueDestination(chunk_); |
| | 2061 if (cc == no_condition) { |
| | 2062 __ jmp(chunk_->GetAssemblyLabel(true_block)); |
| | 2063 } else { |
| | 2064 __ j(cc, chunk_->GetAssemblyLabel(true_block)); |
| | 2065 } |
| | 2066 } |
| | 2067 |
| | 2068 |
| 2058 template<class InstrType> | 2069 template<class InstrType> |
| 2059 void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) { | 2070 void LCodeGen::EmitFalseBranch(InstrType instr, Condition cc) { |
| 2060 int false_block = instr->FalseDestination(chunk_); | 2071 int false_block = instr->FalseDestination(chunk_); |
| 2061 if (cc == no_condition) { | 2072 if (cc == no_condition) { |
| 2062 __ jmp(chunk_->GetAssemblyLabel(false_block)); | 2073 __ jmp(chunk_->GetAssemblyLabel(false_block)); |
| 2063 } else { | 2074 } else { |
| 2064 __ j(cc, chunk_->GetAssemblyLabel(false_block)); | 2075 __ j(cc, chunk_->GetAssemblyLabel(false_block)); |
| 2065 } | 2076 } |
| 2066 } | 2077 } |
| 2067 | 2078 |
| (...skipping 539 matching lines...) |
| 2607 | 2618 |
| 2608 | 2619 |
| 2609 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { | 2620 void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { |
| 2610 Register reg = ToRegister(instr->value()); | 2621 Register reg = ToRegister(instr->value()); |
| 2611 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); | 2622 __ cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); |
| 2612 EmitBranch(instr, equal); | 2623 EmitBranch(instr, equal); |
| 2613 } | 2624 } |
| 2614 | 2625 |
| 2615 | 2626 |
| 2616 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2627 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
| 2617 // Object and function are in fixed registers defined by the stub. | |
| 2618 DCHECK(ToRegister(instr->context()).is(esi)); | 2628 DCHECK(ToRegister(instr->context()).is(esi)); |
| 2619 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); | 2629 DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister())); |
| | 2630 DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister())); |
| | 2631 DCHECK(ToRegister(instr->result()).is(eax)); |
| | 2632 InstanceOfStub stub(isolate()); |
| 2620 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2633 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
| 2621 | |
| 2622 Label true_value, done; | |
| 2623 __ test(eax, Operand(eax)); | |
| 2624 __ j(zero, &true_value, Label::kNear); | |
| 2625 __ mov(ToRegister(instr->result()), factory()->false_value()); | |
| 2626 __ jmp(&done, Label::kNear); | |
| 2627 __ bind(&true_value); | |
| 2628 __ mov(ToRegister(instr->result()), factory()->true_value()); | |
| 2629 __ bind(&done); | |
| 2630 } | 2634 } |
| 2631 | 2635 |
| 2632 | 2636 |
| 2633 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 2637 void LCodeGen::DoHasInPrototypeChainAndBranch( |
| 2634 class DeferredInstanceOfKnownGlobal final : public LDeferredCode { | 2638 LHasInPrototypeChainAndBranch* instr) { |
| 2635 public: | 2639 Register const object = ToRegister(instr->object()); |
| 2636 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 2640 Register const object_map = ToRegister(instr->scratch()); |
| 2637 LInstanceOfKnownGlobal* instr) | 2641 Register const object_prototype = object_map; |
| 2638 : LDeferredCode(codegen), instr_(instr) { } | 2642 Register const prototype = ToRegister(instr->prototype()); |
| 2639 void Generate() override { | |
| 2640 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); | |
| 2641 } | |
| 2642 LInstruction* instr() override { return instr_; } | |
| 2643 Label* map_check() { return &map_check_; } | |
| 2644 private: | |
| 2645 LInstanceOfKnownGlobal* instr_; | |
| 2646 Label map_check_; | |
| 2647 }; | |
| 2648 | 2643 |
| 2649 DeferredInstanceOfKnownGlobal* deferred; | 2644 // The {object} must be a spec object. It's sufficient to know that {object} |
| 2650 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); | 2645 // is not a smi, since all other non-spec objects have {null} prototypes and |
| | 2646 // will be ruled out below. |
| | 2647 if (instr->hydrogen()->ObjectNeedsSmiCheck()) { |
| | 2648 __ test(object, Immediate(kSmiTagMask)); |
| | 2649 EmitFalseBranch(instr, zero); |
| | 2650 } |
| 2651 | 2651 |
| 2652 Label done, false_result; | 2652 // Loop through the {object}'s prototype chain looking for the {prototype}. |
| 2653 Register object = ToRegister(instr->value()); | 2653 __ mov(object_map, FieldOperand(object, HeapObject::kMapOffset)); |
| 2654 Register temp = ToRegister(instr->temp()); | 2654 Label loop; |
| 2655 | 2655 __ bind(&loop); |
| 2656 // A Smi is not an instance of anything. | 2656 __ mov(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset)); |
| 2657 __ JumpIfSmi(object, &false_result, Label::kNear); | 2657 __ cmp(object_prototype, prototype); |
| 2658 | 2658 EmitTrueBranch(instr, equal); |
| 2659 // This is the inlined call site instanceof cache. The two occurrences of the | 2659 __ cmp(object_prototype, prototype); |
| 2660 // hole value will be patched to the last map/result pair generated by the | 2660 EmitFalseBranch(instr, equal); |
| 2661 // instanceof stub. | 2661 __ mov(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset)); |
| 2662 Label cache_miss; | 2662 __ jmp(&loop); |
| 2663 Register map = ToRegister(instr->temp()); | |
| 2664 __ mov(map, FieldOperand(object, HeapObject::kMapOffset)); | |
| 2665 __ bind(deferred->map_check()); // Label for calculating code patching. | |
| 2666 Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); | |
| 2667 __ cmp(map, Operand::ForCell(cache_cell)); // Patched to cached map. | |
| 2668 __ j(not_equal, &cache_miss, Label::kNear); | |
| 2669 __ mov(eax, factory()->the_hole_value()); // Patched to either true or false. | |
| 2670 __ jmp(&done, Label::kNear); | |
| 2671 | |
| 2672 // The inlined call site cache did not match. Check for null and string | |
| 2673 // before calling the deferred code. | |
| 2674 __ bind(&cache_miss); | |
| 2675 // Null is not an instance of anything. | |
| 2676 __ cmp(object, factory()->null_value()); | |
| 2677 __ j(equal, &false_result, Label::kNear); | |
| 2678 | |
| 2679 // String values are not instances of anything. | |
| 2680 Condition is_string = masm_->IsObjectStringType(object, temp, temp); | |
| 2681 __ j(is_string, &false_result, Label::kNear); | |
| 2682 | |
| 2683 // Go to the deferred code. | |
| 2684 __ jmp(deferred->entry()); | |
| 2685 | |
| 2686 __ bind(&false_result); | |
| 2687 __ mov(ToRegister(instr->result()), factory()->false_value()); | |
| 2688 | |
| 2689 // Here result has either true or false. Deferred code also produces true or | |
| 2690 // false object. | |
| 2691 __ bind(deferred->exit()); | |
| 2692 __ bind(&done); | |
| 2693 } | 2663 } |
| 2694 | 2664 |
| 2695 | 2665 |
| 2696 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | |
| 2697 Label* map_check) { | |
| 2698 PushSafepointRegistersScope scope(this); | |
| 2699 | |
| 2700 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | |
| 2701 flags = static_cast<InstanceofStub::Flags>( | |
| 2702 flags | InstanceofStub::kArgsInRegisters); | |
| 2703 flags = static_cast<InstanceofStub::Flags>( | |
| 2704 flags | InstanceofStub::kCallSiteInlineCheck); | |
| 2705 flags = static_cast<InstanceofStub::Flags>( | |
| 2706 flags | InstanceofStub::kReturnTrueFalseObject); | |
| 2707 InstanceofStub stub(isolate(), flags); | |
| 2708 | |
| 2709 // Get the temp register reserved by the instruction. This needs to be a | |
| 2710 // register which is pushed last by PushSafepointRegisters as top of the | |
| 2711 // stack is used to pass the offset to the location of the map check to | |
| 2712 // the stub. | |
| 2713 Register temp = ToRegister(instr->temp()); | |
| 2714 DCHECK(MacroAssembler::SafepointRegisterStackIndex(temp) == 0); | |
| 2715 __ LoadHeapObject(InstanceofStub::right(), instr->function()); | |
| 2716 static const int kAdditionalDelta = 13; | |
| 2717 int delta = masm_->SizeOfCodeGeneratedSince(map_check) + kAdditionalDelta; | |
| 2718 __ mov(temp, Immediate(delta)); | |
| 2719 __ StoreToSafepointRegisterSlot(temp, temp); | |
| 2720 CallCodeGeneric(stub.GetCode(), | |
| 2721 RelocInfo::CODE_TARGET, | |
| 2722 instr, | |
| 2723 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
| 2724 // Get the deoptimization index of the LLazyBailout-environment that | |
| 2725 // corresponds to this instruction. | |
| 2726 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | |
| 2727 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
| 2728 | |
| 2729 // Put the result value into the eax slot and restore all registers. | |
| 2730 __ StoreToSafepointRegisterSlot(eax, eax); | |
| 2731 } | |
| 2732 | |
| 2733 | |
| 2734 void LCodeGen::DoCmpT(LCmpT* instr) { | 2666 void LCodeGen::DoCmpT(LCmpT* instr) { |
| 2735 Token::Value op = instr->op(); | 2667 Token::Value op = instr->op(); |
| 2736 | 2668 |
| 2737 Handle<Code> ic = | 2669 Handle<Code> ic = |
| 2738 CodeFactory::CompareIC(isolate(), op, instr->strength()).code(); | 2670 CodeFactory::CompareIC(isolate(), op, instr->strength()).code(); |
| 2739 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2671 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
| 2740 | 2672 |
| 2741 Condition condition = ComputeCompareCondition(op); | 2673 Condition condition = ComputeCompareCondition(op); |
| 2742 Label true_value, done; | 2674 Label true_value, done; |
| 2743 __ test(eax, Operand(eax)); | 2675 __ test(eax, Operand(eax)); |
| (...skipping 3108 matching lines...) |
| 5852 RecordSafepoint(Safepoint::kNoLazyDeopt); | 5784 RecordSafepoint(Safepoint::kNoLazyDeopt); |
| 5853 } | 5785 } |
| 5854 | 5786 |
| 5855 | 5787 |
| 5856 #undef __ | 5788 #undef __ |
| 5857 | 5789 |
| 5858 } // namespace internal | 5790 } // namespace internal |
| 5859 } // namespace v8 | 5791 } // namespace v8 |
| 5860 | 5792 |
| 5861 #endif // V8_TARGET_ARCH_IA32 | 5793 #endif // V8_TARGET_ARCH_IA32 |
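To make the change easier to follow: the new DoHasInPrototypeChainAndBranch above replaces the patched inline instanceof cache with a plain prototype-chain walk. Below is a minimal C++ sketch of that walk, using hypothetical stand-in types rather than V8's real HeapObject/Map layout; the smi check and the FieldOperand offsets are elided.

```cpp
// Sketch only: Object and Map here are stand-ins, not V8's heap layout.
struct Object;

struct Map {
  const Object* prototype;  // plays the role of Map::kPrototypeOffset
};

struct Object {
  const Map* map;  // plays the role of HeapObject::kMapOffset
};

// nullptr stands in for the JS null value that terminates every chain.
// Mirrors the emitted loop: EmitTrueBranch on a prototype hit,
// EmitFalseBranch when the chain ends at null.
bool HasInPrototypeChain(const Object* object, const Object* prototype) {
  const Map* map = object->map;        // mov object_map, [object + map]
  for (;;) {                           // bind(&loop)
    const Object* p = map->prototype;  // mov object_prototype, [map + proto]
    if (p == prototype) return true;   // cmp + EmitTrueBranch(equal)
    if (p == nullptr) return false;    // cmp null + EmitFalseBranch(equal)
    map = p->map;                      // mov object_map, [proto + map]
  }                                    // jmp(&loop)
}
```

Note the loop has no explicit bound: like the generated code, it relies on every prototype chain ending at null.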