OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/code-factory.h" | 5 #include "src/code-factory.h" |
6 #include "src/code-stubs.h" | 6 #include "src/code-stubs.h" |
7 #include "src/cpu-profiler.h" | 7 #include "src/cpu-profiler.h" |
8 #include "src/hydrogen-osr.h" | 8 #include "src/hydrogen-osr.h" |
9 #include "src/ic/ic.h" | 9 #include "src/ic/ic.h" |
10 #include "src/ic/stub-cache.h" | 10 #include "src/ic/stub-cache.h" |
(...skipping 2140 matching lines...) |
2151 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, | 2151 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, |
2152 condition, src1, src2); | 2152 condition, src1, src2); |
2153 } else { | 2153 } else { |
2154 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, | 2154 __ BranchF(chunk_->GetAssemblyLabel(left_block), NULL, |
2155 condition, src1, src2); | 2155 condition, src1, src2); |
2156 __ Branch(chunk_->GetAssemblyLabel(right_block)); | 2156 __ Branch(chunk_->GetAssemblyLabel(right_block)); |
2157 } | 2157 } |
2158 } | 2158 } |
2159 | 2159 |
2160 | 2160 |
2161 template<class InstrType> | 2161 template <class InstrType> |
2162 void LCodeGen::EmitFalseBranch(InstrType instr, | 2162 void LCodeGen::EmitTrueBranch(InstrType instr, Condition condition, |
2163 Condition condition, | 2163 Register src1, const Operand& src2) { |
2164 Register src1, | 2164 int true_block = instr->TrueDestination(chunk_); |
2165 const Operand& src2) { | 2165 __ Branch(chunk_->GetAssemblyLabel(true_block), condition, src1, src2); |
| 2166 } |
| 2167 |
| 2168 |
| 2169 template <class InstrType> |
| 2170 void LCodeGen::EmitFalseBranch(InstrType instr, Condition condition, |
| 2171 Register src1, const Operand& src2) { |
2166 int false_block = instr->FalseDestination(chunk_); | 2172 int false_block = instr->FalseDestination(chunk_); |
2167 __ Branch(chunk_->GetAssemblyLabel(false_block), condition, src1, src2); | 2173 __ Branch(chunk_->GetAssemblyLabel(false_block), condition, src1, src2); |
2168 } | 2174 } |
2169 | 2175 |
2170 | 2176 |
2171 template<class InstrType> | 2177 template<class InstrType> |
2172 void LCodeGen::EmitFalseBranchF(InstrType instr, | 2178 void LCodeGen::EmitFalseBranchF(InstrType instr, |
2173 Condition condition, | 2179 Condition condition, |
2174 FPURegister src1, | 2180 FPURegister src1, |
2175 FPURegister src2) { | 2181 FPURegister src2) { |
(...skipping 573 matching lines...) |
2749 Register temp = ToRegister(instr->temp()); | 2755 Register temp = ToRegister(instr->temp()); |
2750 | 2756 |
2751 __ ld(temp, FieldMemOperand(reg, HeapObject::kMapOffset)); | 2757 __ ld(temp, FieldMemOperand(reg, HeapObject::kMapOffset)); |
2752 EmitBranch(instr, eq, temp, Operand(instr->map())); | 2758 EmitBranch(instr, eq, temp, Operand(instr->map())); |
2753 } | 2759 } |
2754 | 2760 |
2755 | 2761 |
2756 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2762 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
2757 DCHECK(ToRegister(instr->context()).is(cp)); | 2763 DCHECK(ToRegister(instr->context()).is(cp)); |
2758 Label true_label, done; | 2764 Label true_label, done; |
2759 DCHECK(ToRegister(instr->left()).is(a0)); // Object is in a0. | 2765 DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister())); |
2760 DCHECK(ToRegister(instr->right()).is(a1)); // Function is in a1. | 2766 DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister())); |
2761 Register result = ToRegister(instr->result()); | 2767 DCHECK(ToRegister(instr->result()).is(v0)); |
2762 DCHECK(result.is(v0)); | |
2763 | 2768 |
2764 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); | 2769 InstanceOfStub stub(isolate()); |
2765 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2770 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
2766 | |
2767 __ Branch(&true_label, eq, result, Operand(zero_reg)); | |
2768 __ li(result, Operand(factory()->false_value())); | |
2769 __ Branch(&done); | |
2770 __ bind(&true_label); | |
2771 __ li(result, Operand(factory()->true_value())); | |
2772 __ bind(&done); | |
2773 } | 2771 } |
2774 | 2772 |
2775 | 2773 |
2776 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 2774 void LCodeGen::DoHasInPrototypeChainAndBranch( |
2777 class DeferredInstanceOfKnownGlobal final : public LDeferredCode { | 2775 LHasInPrototypeChainAndBranch* instr) { |
2778 public: | 2776 Register const object = ToRegister(instr->object()); |
2779 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 2777 Register const object_map = scratch0(); |
2780 LInstanceOfKnownGlobal* instr) | 2778 Register const object_prototype = object_map; |
2781 : LDeferredCode(codegen), instr_(instr) { } | 2779 Register const prototype = ToRegister(instr->prototype()); |
2782 void Generate() override { | |
2783 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_); | |
2784 } | |
2785 LInstruction* instr() override { return instr_; } | |
2786 Label* map_check() { return &map_check_; } | |
2787 | 2780 |
2788 private: | 2781 // The {object} must be a spec object. It's sufficient to know that {object} |
2789 LInstanceOfKnownGlobal* instr_; | 2782 // is not a smi, since all other non-spec objects have {null} prototypes and |
2790 Label map_check_; | 2783 // will be ruled out below. |
2791 }; | 2784 if (instr->hydrogen()->ObjectNeedsSmiCheck()) { |
| 2785 __ SmiTst(object, at); |
| 2786 EmitFalseBranch(instr, eq, at, Operand(zero_reg)); |
| 2787 } |
2792 | 2788 |
2793 DeferredInstanceOfKnownGlobal* deferred; | 2789 // Loop through the {object}'s prototype chain looking for the {prototype}. |
2794 deferred = new(zone()) DeferredInstanceOfKnownGlobal(this, instr); | 2790 __ ld(object_map, FieldMemOperand(object, HeapObject::kMapOffset)); |
2795 | 2791 Label loop; |
2796 Label done, false_result; | 2792 __ bind(&loop); |
2797 Register object = ToRegister(instr->value()); | 2793 __ ld(object_prototype, FieldMemOperand(object_map, Map::kPrototypeOffset)); |
2798 Register temp = ToRegister(instr->temp()); | 2794 EmitTrueBranch(instr, eq, object_prototype, Operand(prototype)); |
2799 Register result = ToRegister(instr->result()); | 2795 __ LoadRoot(at, Heap::kNullValueRootIndex); |
2800 | 2796 EmitFalseBranch(instr, eq, object_prototype, Operand(at)); |
2801 DCHECK(object.is(a0)); | 2797 __ Branch(&loop, USE_DELAY_SLOT); |
2802 DCHECK(result.is(v0)); | 2798 __ ld(object_map, FieldMemOperand(object_prototype, |
2803 | 2799 HeapObject::kMapOffset)); // In delay slot. |
2804 // A Smi is not instance of anything. | |
2805 __ JumpIfSmi(object, &false_result); | |
2806 | |
2807 // This is the inlined call site instanceof cache. The two occurences of the | |
2808 // hole value will be patched to the last map/result pair generated by the | |
2809 // instanceof stub. | |
2810 Label cache_miss; | |
2811 Register map = temp; | |
2812 __ ld(map, FieldMemOperand(object, HeapObject::kMapOffset)); | |
2813 | |
2814 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
2815 __ bind(deferred->map_check()); // Label for calculating code patching. | |
2816 // We use Factory::the_hole_value() on purpose instead of loading from the | |
2817 // root array to force relocation to be able to later patch with | |
2818 // the cached map. | |
2819 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); | |
2820 __ li(at, Operand(cell)); | |
2821 __ ld(at, FieldMemOperand(at, Cell::kValueOffset)); | |
2822 __ BranchShort(&cache_miss, ne, map, Operand(at)); | |
2823 // We use Factory::the_hole_value() on purpose instead of loading from the | |
2824 // root array to force relocation to be able to later patch | |
2825 // with true or false. The distance from map check has to be constant. | |
2826 __ li(result, Operand(factory()->the_hole_value())); | |
2827 __ Branch(&done); | |
2828 | |
2829 // The inlined call site cache did not match. Check null and string before | |
2830 // calling the deferred code. | |
2831 __ bind(&cache_miss); | |
2832 // Null is not instance of anything. | |
2833 __ LoadRoot(temp, Heap::kNullValueRootIndex); | |
2834 __ Branch(&false_result, eq, object, Operand(temp)); | |
2835 | |
2836 // String values is not instance of anything. | |
2837 Condition cc = __ IsObjectStringType(object, temp, temp); | |
2838 __ Branch(&false_result, cc, temp, Operand(zero_reg)); | |
2839 | |
2840 // Go to the deferred code. | |
2841 __ Branch(deferred->entry()); | |
2842 | |
2843 __ bind(&false_result); | |
2844 __ LoadRoot(result, Heap::kFalseValueRootIndex); | |
2845 | |
2846 // Here result has either true or false. Deferred code also produces true or | |
2847 // false object. | |
2848 __ bind(deferred->exit()); | |
2849 __ bind(&done); | |
2850 } | 2800 } |
2851 | 2801 |
2852 | 2802 |
2853 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | |
2854 Label* map_check) { | |
2855 Register result = ToRegister(instr->result()); | |
2856 DCHECK(result.is(v0)); | |
2857 | |
2858 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | |
2859 flags = static_cast<InstanceofStub::Flags>( | |
2860 flags | InstanceofStub::kArgsInRegisters); | |
2861 flags = static_cast<InstanceofStub::Flags>( | |
2862 flags | InstanceofStub::kCallSiteInlineCheck); | |
2863 flags = static_cast<InstanceofStub::Flags>( | |
2864 flags | InstanceofStub::kReturnTrueFalseObject); | |
2865 InstanceofStub stub(isolate(), flags); | |
2866 | |
2867 PushSafepointRegistersScope scope(this); | |
2868 LoadContextFromDeferred(instr->context()); | |
2869 | |
2870 // Get the temp register reserved by the instruction. This needs to be a4 as | |
2871 // its slot of the pushing of safepoint registers is used to communicate the | |
2872 // offset to the location of the map check. | |
2873 Register temp = ToRegister(instr->temp()); | |
2874 DCHECK(temp.is(a4)); | |
2875 __ li(InstanceofStub::right(), instr->function()); | |
2876 static const int kAdditionalDelta = 13; | |
2877 int delta = masm_->InstructionsGeneratedSince(map_check) + kAdditionalDelta; | |
2878 Label before_push_delta; | |
2879 __ bind(&before_push_delta); | |
2880 { | |
2881 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
2882 __ li(temp, Operand(delta * kIntSize), CONSTANT_SIZE); | |
2883 __ StoreToSafepointRegisterSlot(temp, temp); | |
2884 } | |
2885 CallCodeGeneric(stub.GetCode(), | |
2886 RelocInfo::CODE_TARGET, | |
2887 instr, | |
2888 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
2889 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | |
2890 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
2891 // Put the result value into the result register slot and | |
2892 // restore all registers. | |
2893 __ StoreToSafepointRegisterSlot(result, result); | |
2894 } | |
2895 | |
2896 | |
2897 void LCodeGen::DoCmpT(LCmpT* instr) { | 2803 void LCodeGen::DoCmpT(LCmpT* instr) { |
2898 DCHECK(ToRegister(instr->context()).is(cp)); | 2804 DCHECK(ToRegister(instr->context()).is(cp)); |
2899 Token::Value op = instr->op(); | 2805 Token::Value op = instr->op(); |
2900 | 2806 |
2901 Handle<Code> ic = | 2807 Handle<Code> ic = |
2902 CodeFactory::CompareIC(isolate(), op, instr->strength()).code(); | 2808 CodeFactory::CompareIC(isolate(), op, instr->strength()).code(); |
2903 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2809 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2904 // On MIPS there is no need for a "no inlined smi code" marker (nop). | 2810 // On MIPS there is no need for a "no inlined smi code" marker (nop). |
2905 | 2811 |
2906 Condition condition = ComputeCompareCondition(op); | 2812 Condition condition = ComputeCompareCondition(op); |
(...skipping 3341 matching lines...) |
6248 __ Push(at, ToRegister(instr->function())); | 6154 __ Push(at, ToRegister(instr->function())); |
6249 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6155 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
6250 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6156 RecordSafepoint(Safepoint::kNoLazyDeopt); |
6251 } | 6157 } |
6252 | 6158 |
6253 | 6159 |
6254 #undef __ | 6160 #undef __ |
6255 | 6161 |
6256 } // namespace internal | 6162 } // namespace internal |
6257 } // namespace v8 | 6163 } // namespace v8 |
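The new DoHasInPrototypeChainAndBranch above replaces the old InstanceofStub call-site cache with an emitted map/prototype walk. As a minimal standalone sketch of what that emitted loop computes (not V8 code: the Obj/Map structs and the HasInPrototypeChain name are illustrative only, and the leading smi check is omitted):

#include <cassert>

// Simplified object model for illustration only (not V8's types): every
// object carries a map, and every map points at the object's prototype;
// nullptr stands in for the null prototype that terminates every chain.
struct Obj;
struct Map { const Obj* prototype; };
struct Obj { const Map* map; };

// Mirrors the emitted loop: follow map->prototype links until we either
// reach the target prototype (true branch) or fall off the end of the
// chain at null (false branch).
bool HasInPrototypeChain(const Obj* object, const Obj* prototype) {
  const Map* map = object->map;
  for (;;) {
    const Obj* object_prototype = map->prototype;
    if (object_prototype == prototype) return true;
    if (object_prototype == nullptr) return false;
    map = object_prototype->map;
  }
}

int main() {
  Map proto_map{nullptr};
  Obj proto{&proto_map};   // prototype object at the top of a chain
  Map child_map{&proto};
  Obj child{&child_map};   // child whose prototype is `proto`
  assert(HasInPrototypeChain(&child, &proto));
  assert(!HasInPrototypeChain(&proto, &child));
  return 0;
}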