OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/base/bits.h" | 5 #include "src/base/bits.h" |
6 #include "src/code-factory.h" | 6 #include "src/code-factory.h" |
7 #include "src/code-stubs.h" | 7 #include "src/code-stubs.h" |
8 #include "src/cpu-profiler.h" | 8 #include "src/cpu-profiler.h" |
9 #include "src/hydrogen-osr.h" | 9 #include "src/hydrogen-osr.h" |
10 #include "src/ic/ic.h" | 10 #include "src/ic/ic.h" |
(...skipping 2162 matching lines...)
2173 } else if (right_block == next_block) { | 2173 } else if (right_block == next_block) { |
2174 __ b(cond, chunk_->GetAssemblyLabel(left_block), cr); | 2174 __ b(cond, chunk_->GetAssemblyLabel(left_block), cr); |
2175 } else { | 2175 } else { |
2176 __ b(cond, chunk_->GetAssemblyLabel(left_block), cr); | 2176 __ b(cond, chunk_->GetAssemblyLabel(left_block), cr); |
2177 __ b(chunk_->GetAssemblyLabel(right_block)); | 2177 __ b(chunk_->GetAssemblyLabel(right_block)); |
2178 } | 2178 } |
2179 } | 2179 } |
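The tail of EmitBranch visible above implements the usual fall-through optimization: when one successor is the next block in emission order, a single conditional branch suffices, and only when neither successor falls through is an unconditional jump appended. A minimal self-contained C++ sketch of that decision follows; the Emit* helpers are hypothetical stand-ins (not the V8 emitters), and the left_block == next_block arm, hidden by the skipped diff context, is assumed symmetric:

    #include <cstdio>

    enum Condition { kEq, kNe };
    static Condition Negate(Condition c) { return c == kEq ? kNe : kEq; }
    // Hypothetical helpers, not the V8 masm emitters.
    static void EmitCond(Condition c, int block) {
      std::printf("  b%s -> B%d\n", c == kEq ? "eq" : "ne", block);
    }
    static void EmitJump(int block) { std::printf("  b -> B%d\n", block); }

    // Emit the branch pair for a two-way split, given which block will be
    // emitted next in the instruction stream.
    static void EmitBranchSketch(int left, int right, int next,
                                 Condition cond) {
      if (left == next) {
        EmitCond(Negate(cond), right);  // fall through into left (assumed arm)
      } else if (right == next) {
        EmitCond(cond, left);           // fall through into right
      } else {
        EmitCond(cond, left);
        EmitJump(right);                // neither successor falls through
      }
    }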
2180 | 2180 |
2181 | 2181 |
2182 template <class InstrType> | 2182 template <class InstrType> |
| 2183 void LCodeGen::EmitTrueBranch(InstrType instr, Condition cond, CRegister cr) { |
| 2184 int true_block = instr->TrueDestination(chunk_); |
| 2185 __ b(cond, chunk_->GetAssemblyLabel(true_block), cr); |
| 2186 } |
| 2187 |
| 2188 |
| 2189 template <class InstrType> |
2183 void LCodeGen::EmitFalseBranch(InstrType instr, Condition cond, CRegister cr) { | 2190 void LCodeGen::EmitFalseBranch(InstrType instr, Condition cond, CRegister cr) { |
2184 int false_block = instr->FalseDestination(chunk_); | 2191 int false_block = instr->FalseDestination(chunk_); |
2185 __ b(cond, chunk_->GetAssemblyLabel(false_block), cr); | 2192 __ b(cond, chunk_->GetAssemblyLabel(false_block), cr); |
2186 } | 2193 } |
2187 | 2194 |
2188 | 2195 |
2189 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { __ stop("LBreak"); } | 2196 void LCodeGen::DoDebugBreak(LDebugBreak* instr) { __ stop("LBreak"); } |
2190 | 2197 |
2191 | 2198 |
2192 void LCodeGen::DoBranch(LBranch* instr) { | 2199 void LCodeGen::DoBranch(LBranch* instr) { |
(...skipping 559 matching lines...)
2752 Register temp = ToRegister(instr->temp()); | 2759 Register temp = ToRegister(instr->temp()); |
2753 | 2760 |
2754 __ LoadP(temp, FieldMemOperand(reg, HeapObject::kMapOffset)); | 2761 __ LoadP(temp, FieldMemOperand(reg, HeapObject::kMapOffset)); |
2755 __ Cmpi(temp, Operand(instr->map()), r0); | 2762 __ Cmpi(temp, Operand(instr->map()), r0); |
2756 EmitBranch(instr, eq); | 2763 EmitBranch(instr, eq); |
2757 } | 2764 } |
2758 | 2765 |
2759 | 2766 |
2760 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { | 2767 void LCodeGen::DoInstanceOf(LInstanceOf* instr) { |
2761 DCHECK(ToRegister(instr->context()).is(cp)); | 2768 DCHECK(ToRegister(instr->context()).is(cp)); |
2762 DCHECK(ToRegister(instr->left()).is(r3)); // Object is in r3. | 2769 DCHECK(ToRegister(instr->left()).is(InstanceOfDescriptor::LeftRegister())); |
2763 DCHECK(ToRegister(instr->right()).is(r4)); // Function is in r4. | 2770 DCHECK(ToRegister(instr->right()).is(InstanceOfDescriptor::RightRegister())); |
2764 | 2771 DCHECK(ToRegister(instr->result()).is(r3)); |
2765 InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters); | 2772 InstanceOfStub stub(isolate()); |
2766 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); | 2773 CallCode(stub.GetCode(), RelocInfo::CODE_TARGET, instr); |
2767 | |
2768 if (CpuFeatures::IsSupported(ISELECT)) { | |
2769 __ mov(r4, Operand(factory()->true_value())); | |
2770 __ mov(r5, Operand(factory()->false_value())); | |
2771 __ cmpi(r3, Operand::Zero()); | |
2772 __ isel(eq, r3, r4, r5); | |
2773 } else { | |
2774 Label equal, done; | |
2775 __ cmpi(r3, Operand::Zero()); | |
2776 __ beq(&equal); | |
2777 __ mov(r3, Operand(factory()->false_value())); | |
2778 __ b(&done); | |
2779 | |
2780 __ bind(&equal); | |
2781 __ mov(r3, Operand(factory()->true_value())); | |
2782 __ bind(&done); | |
2783 } | |
2784 } | 2774 } |
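The deleted right-hand block converted the old InstanceofStub's integer result (zero meaning "is an instance") into a heap boolean, using the Power ISELECT facility to avoid a branch where available; the new InstanceOfStub returns the true/false object itself, so the conversion disappears. What the isel path computed, as a plain C++ sketch (the uintptr_t values are toy stand-ins for V8 heap objects, not V8 types):

    #include <cstdint>

    // cmpi r3, 0 ; isel eq, r3, r4, r5 -- a branch-free select between the
    // preloaded true object (r4) and false object (r5). Toy stand-ins for
    // heap values, for illustration only.
    static std::uintptr_t SelectBoolean(std::intptr_t stub_result,
                                        std::uintptr_t true_obj,
                                        std::uintptr_t false_obj) {
      return stub_result == 0 ? true_obj : false_obj;
    }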
2785 | 2775 |
2786 | 2776 |
2787 void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { | 2777 void LCodeGen::DoHasInPrototypeChainAndBranch( |
2788 class DeferredInstanceOfKnownGlobal final : public LDeferredCode { | 2778 LHasInPrototypeChainAndBranch* instr) { |
2789 public: | 2779 Register const object = ToRegister(instr->object()); |
2790 DeferredInstanceOfKnownGlobal(LCodeGen* codegen, | 2780 Register const object_map = scratch0(); |
2791 LInstanceOfKnownGlobal* instr) | 2781 Register const object_prototype = object_map; |
2792 : LDeferredCode(codegen), instr_(instr) {} | 2782 Register const prototype = ToRegister(instr->prototype()); |
2793 void Generate() override { | |
2794 codegen()->DoDeferredInstanceOfKnownGlobal(instr_, &map_check_, | |
2795 &load_bool_); | |
2796 } | |
2797 LInstruction* instr() override { return instr_; } | |
2798 Label* map_check() { return &map_check_; } | |
2799 Label* load_bool() { return &load_bool_; } | |
2800 | 2783 |
2801 private: | 2784 // The {object} must be a spec object. It's sufficient to know that {object} |
2802 LInstanceOfKnownGlobal* instr_; | 2785 // is not a smi, since all other non-spec objects have {null} prototypes and |
2803 Label map_check_; | 2786 // will be ruled out below. |
2804 Label load_bool_; | 2787 if (instr->hydrogen()->ObjectNeedsSmiCheck()) { |
2805 }; | 2788 __ TestIfSmi(object, r0); |
| 2789 EmitFalseBranch(instr, eq, cr0); |
| 2790 } |
2806 | 2791 |
2807 DeferredInstanceOfKnownGlobal* deferred; | 2792 // Loop through the {object}'s prototype chain looking for the {prototype}. |
2808 deferred = new (zone()) DeferredInstanceOfKnownGlobal(this, instr); | 2793 __ LoadP(object_map, FieldMemOperand(object, HeapObject::kMapOffset)); |
2809 | 2794 Label loop; |
2810 Label done, false_result; | 2795 __ bind(&loop); |
2811 Register object = ToRegister(instr->value()); | 2796 __ LoadP(object_prototype, |
2812 Register temp = ToRegister(instr->temp()); | 2797 FieldMemOperand(object_map, Map::kPrototypeOffset)); |
2813 Register result = ToRegister(instr->result()); | 2798 __ cmp(object_prototype, prototype); |
2814 | 2799 EmitTrueBranch(instr, eq); |
2815 // A Smi is not an instance of anything. | 2800 __ CompareRoot(object_prototype, Heap::kNullValueRootIndex); |
2816 __ JumpIfSmi(object, &false_result); | 2801 EmitFalseBranch(instr, eq); |
2817 | 2802 __ LoadP(object_map, |
2818 // This is the inlined call site instanceof cache. The two occurrences of the | 2802 __ LoadP(object_map, |
2819 // hole value will be patched to the last map/result pair generated by the | 2804 __ b(&loop); |
2820 // instanceof stub. | |
2821 Label cache_miss; | |
2822 Register map = temp; | |
2823 __ LoadP(map, FieldMemOperand(object, HeapObject::kMapOffset)); | |
2824 { | |
2825 // Block trampoline emission to ensure the positions of instructions are | |
2826 // as expected by the patcher. See InstanceofStub::Generate(). | |
2827 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
2828 __ bind(deferred->map_check()); // Label for calculating code patching. | |
2829 // We use Factory::the_hole_value() on purpose instead of loading from the | |
2830 // root array to force relocation to be able to later patch with | |
2831 // the cached map. | |
2832 Handle<Cell> cell = factory()->NewCell(factory()->the_hole_value()); | |
2833 __ mov(ip, Operand(cell)); | |
2834 __ LoadP(ip, FieldMemOperand(ip, Cell::kValueOffset)); | |
2835 __ cmp(map, ip); | |
2836 __ bc_short(ne, &cache_miss); | |
2837 __ bind(deferred->load_bool()); // Label for calculating code patching. | |
2838 // We use Factory::the_hole_value() on purpose instead of loading from the | |
2839 // root array to force relocation to be able to later patch | |
2840 // with true or false. | |
2841 __ mov(result, Operand(factory()->the_hole_value())); | |
2842 } | |
2843 __ b(&done); | |
2844 | |
2845 // The inlined call site cache did not match. Check null and string before | |
2846 // calling the deferred code. | |
2847 __ bind(&cache_miss); | |
2848 // Null is not an instance of anything. | |
2849 __ LoadRoot(ip, Heap::kNullValueRootIndex); | |
2850 __ cmp(object, ip); | |
2851 __ beq(&false_result); | |
2852 | |
2853 // String values are not instances of anything. | |
2854 Condition is_string = masm_->IsObjectStringType(object, temp); | |
2855 __ b(is_string, &false_result, cr0); | |
2856 | |
2857 // Go to the deferred code. | |
2858 __ b(deferred->entry()); | |
2859 | |
2860 __ bind(&false_result); | |
2861 __ LoadRoot(result, Heap::kFalseValueRootIndex); | |
2862 | |
2863 // Here, result holds either true or false. The deferred code also produces | |
2864 // a true or false object. | |
2865 __ bind(deferred->exit()); | |
2866 __ bind(&done); | |
2867 } | 2805 } |
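The new DoHasInPrototypeChainAndBranch on the right replaces the patchable inline cache on the left with a direct walk: after the optional smi check, it repeatedly loads the prototype out of the current map, branching true on a match with {prototype} and false on reaching null. The same walk over a toy C++ object model (the Obj type is invented for illustration; the generated code chases Map::kPrototypeOffset rather than a C++ pointer field):

    // Toy model, not V8's Map/prototype layout. nullptr models the JS null
    // prototype that terminates every chain.
    struct Obj {
      const Obj* proto;
    };

    // True iff prototype occurs anywhere on object's prototype chain.
    static bool HasInPrototypeChain(const Obj* object, const Obj* prototype) {
      for (const Obj* p = object->proto; p != nullptr; p = p->proto) {
        if (p == prototype) return true;
      }
      return false;
    }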
2868 | 2806 |
2869 | 2807 |
2870 void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr, | |
2871 Label* map_check, | |
2872 Label* bool_load) { | |
2873 InstanceofStub::Flags flags = InstanceofStub::kNoFlags; | |
2874 flags = static_cast<InstanceofStub::Flags>(flags | | |
2875 InstanceofStub::kArgsInRegisters); | |
2876 flags = static_cast<InstanceofStub::Flags>( | |
2877 flags | InstanceofStub::kCallSiteInlineCheck); | |
2878 flags = static_cast<InstanceofStub::Flags>( | |
2879 flags | InstanceofStub::kReturnTrueFalseObject); | |
2880 InstanceofStub stub(isolate(), flags); | |
2881 | |
2882 PushSafepointRegistersScope scope(this); | |
2883 LoadContextFromDeferred(instr->context()); | |
2884 | |
2885 __ Move(InstanceofStub::right(), instr->function()); | |
2886 { | |
2887 Assembler::BlockTrampolinePoolScope block_trampoline_pool(masm_); | |
2888 Handle<Code> code = stub.GetCode(); | |
2889 // Include instructions below in delta: bitwise_mov32 + li + call | |
2890 int additional_delta = 3 * Instruction::kInstrSize + masm_->CallSize(code); | |
2891 // The labels must already be bound since the code has predictable size up | |
2892 // to the call instruction. | |
2893 DCHECK(map_check->is_bound()); | |
2894 DCHECK(bool_load->is_bound()); | |
2895 int map_check_delta = | |
2896 masm_->InstructionsGeneratedSince(map_check) * Instruction::kInstrSize; | |
2897 int bool_load_delta = | |
2898 masm_->InstructionsGeneratedSince(bool_load) * Instruction::kInstrSize; | |
2899 // r8 is the delta from our callee's lr to the location of the map check. | |
2900 __ bitwise_mov32(r8, map_check_delta + additional_delta); | |
2901 // r9 is the delta from map check to bool load. | |
2902 __ li(r9, Operand(map_check_delta - bool_load_delta)); | |
2903 CallCodeGeneric(code, RelocInfo::CODE_TARGET, instr, | |
2904 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS); | |
2905 DCHECK_EQ((map_check_delta + additional_delta) / Instruction::kInstrSize, | |
2906 masm_->InstructionsGeneratedSince(map_check)); | |
2907 } | |
2908 LEnvironment* env = instr->GetDeferredLazyDeoptimizationEnvironment(); | |
2909 safepoints_.RecordLazyDeoptimizationIndex(env->deoptimization_index()); | |
2910 // Put the result value (r3) into the result register slot and | |
2911 // restore all registers. | |
2912 __ StoreToSafepointRegisterSlot(r3, ToRegister(instr->result())); | |
2913 } | |
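The deleted deferred path passed the stub two byte offsets so it could locate and patch the inlined cache: r8, the distance from the callee's lr back to the map check, and r9, the distance from the map check forward to the bool load. The arithmetic, restated as a sketch (kInstrSize = 4 for fixed-width PPC; the three extra instructions follow the "bitwise_mov32 + li + call" comment above, presumably two for bitwise_mov32 and one for li; the call-sequence size is a parameter since CallSize depends on the target):

    constexpr int kInstrSize = 4;  // fixed-width PPC encoding

    // r8: bytes from the instruction after the call back to the map check.
    // The "3" counts bitwise_mov32 (presumably 2 insns) plus li (1 insn).
    static int MapCheckDelta(int insns_since_map_check, int call_size) {
      int additional_delta = 3 * kInstrSize + call_size;
      return insns_since_map_check * kInstrSize + additional_delta;
    }

    // r9: bytes from the map check forward to the bool load.
    static int BoolLoadDelta(int map_check_delta_bytes,
                             int insns_since_bool_load) {
      return map_check_delta_bytes - insns_since_bool_load * kInstrSize;
    }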
2914 | |
2915 | |
2916 void LCodeGen::DoCmpT(LCmpT* instr) { | 2808 void LCodeGen::DoCmpT(LCmpT* instr) { |
2917 DCHECK(ToRegister(instr->context()).is(cp)); | 2809 DCHECK(ToRegister(instr->context()).is(cp)); |
2918 Token::Value op = instr->op(); | 2810 Token::Value op = instr->op(); |
2919 | 2811 |
2920 Handle<Code> ic = | 2812 Handle<Code> ic = |
2921 CodeFactory::CompareIC(isolate(), op, instr->strength()).code(); | 2813 CodeFactory::CompareIC(isolate(), op, instr->strength()).code(); |
2922 CallCode(ic, RelocInfo::CODE_TARGET, instr); | 2814 CallCode(ic, RelocInfo::CODE_TARGET, instr); |
2923 // This instruction also signals that no smi code was inlined. | 2815 // This instruction also signals that no smi code was inlined. |
2924 __ cmpi(r3, Operand::Zero()); | 2816 __ cmpi(r3, Operand::Zero()); |
2925 | 2817 |
(...skipping 3339 matching lines...)
6265 __ Push(scope_info); | 6157 __ Push(scope_info); |
6266 __ push(ToRegister(instr->function())); | 6158 __ push(ToRegister(instr->function())); |
6267 CallRuntime(Runtime::kPushBlockContext, 2, instr); | 6159 CallRuntime(Runtime::kPushBlockContext, 2, instr); |
6268 RecordSafepoint(Safepoint::kNoLazyDeopt); | 6160 RecordSafepoint(Safepoint::kNoLazyDeopt); |
6269 } | 6161 } |
6270 | 6162 |
6271 | 6163 |
6272 #undef __ | 6164 #undef __ |
6273 } // namespace internal | 6165 } // namespace internal |
6274 } // namespace v8 | 6166 } // namespace v8 |