| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // | 2 // |
| 3 // Use of this source code is governed by a BSD-style license that can be | 3 // Use of this source code is governed by a BSD-style license that can be |
| 4 // found in the LICENSE file. | 4 // found in the LICENSE file. |
| 5 | 5 |
| 6 #include "src/crankshaft/s390/lithium-codegen-s390.h" | 6 #include "src/crankshaft/s390/lithium-codegen-s390.h" |
| 7 | 7 |
| 8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
| 9 #include "src/code-factory.h" | 9 #include "src/code-factory.h" |
| 10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
| (...skipping 48 matching lines...) |
| 59 } | 59 } |
| 60 | 60 |
| 61 void LCodeGen::SaveCallerDoubles() { | 61 void LCodeGen::SaveCallerDoubles() { |
| 62 DCHECK(info()->saves_caller_doubles()); | 62 DCHECK(info()->saves_caller_doubles()); |
| 63 DCHECK(NeedsEagerFrame()); | 63 DCHECK(NeedsEagerFrame()); |
| 64 Comment(";;; Save clobbered callee double registers"); | 64 Comment(";;; Save clobbered callee double registers"); |
| 65 int count = 0; | 65 int count = 0; |
| 66 BitVector* doubles = chunk()->allocated_double_registers(); | 66 BitVector* doubles = chunk()->allocated_double_registers(); |
| 67 BitVector::Iterator save_iterator(doubles); | 67 BitVector::Iterator save_iterator(doubles); |
| 68 while (!save_iterator.Done()) { | 68 while (!save_iterator.Done()) { |
| 69 __ std(DoubleRegister::from_code(save_iterator.Current()), | 69 __ StoreDouble(DoubleRegister::from_code(save_iterator.Current()), |
| 70 MemOperand(sp, count * kDoubleSize)); | 70 MemOperand(sp, count * kDoubleSize)); |
| 71 save_iterator.Advance(); | 71 save_iterator.Advance(); |
| 72 count++; | 72 count++; |
| 73 } | 73 } |
| 74 } | 74 } |
| 75 | 75 |
| 76 void LCodeGen::RestoreCallerDoubles() { | 76 void LCodeGen::RestoreCallerDoubles() { |
| 77 DCHECK(info()->saves_caller_doubles()); | 77 DCHECK(info()->saves_caller_doubles()); |
| 78 DCHECK(NeedsEagerFrame()); | 78 DCHECK(NeedsEagerFrame()); |
| 79 Comment(";;; Restore clobbered callee double registers"); | 79 Comment(";;; Restore clobbered callee double registers"); |
| 80 BitVector* doubles = chunk()->allocated_double_registers(); | 80 BitVector* doubles = chunk()->allocated_double_registers(); |
| 81 BitVector::Iterator save_iterator(doubles); | 81 BitVector::Iterator save_iterator(doubles); |
| 82 int count = 0; | 82 int count = 0; |
| 83 while (!save_iterator.Done()) { | 83 while (!save_iterator.Done()) { |
| 84 __ ld(DoubleRegister::from_code(save_iterator.Current()), | 84 __ LoadDouble(DoubleRegister::from_code(save_iterator.Current()), |
| 85 MemOperand(sp, count * kDoubleSize)); | 85 MemOperand(sp, count * kDoubleSize)); |
| 86 save_iterator.Advance(); | 86 save_iterator.Advance(); |
| 87 count++; | 87 count++; |
| 88 } | 88 } |
| 89 } | 89 } |
| 90 | 90 |
| 91 bool LCodeGen::GeneratePrologue() { | 91 bool LCodeGen::GeneratePrologue() { |
| 92 DCHECK(is_generating()); | 92 DCHECK(is_generating()); |
| 93 | 93 |
| 94 if (info()->IsOptimizing()) { | 94 if (info()->IsOptimizing()) { |
| 95 ProfileEntryHookStub::MaybeCallEntryHook(masm_); | 95 ProfileEntryHookStub::MaybeCallEntryHook(masm_); |
| (...skipping 1986 matching lines...) |
| 2082 EmitBranch(instr, eq); | 2082 EmitBranch(instr, eq); |
| 2083 } else if (type.IsSmi()) { | 2083 } else if (type.IsSmi()) { |
| 2084 DCHECK(!info()->IsStub()); | 2084 DCHECK(!info()->IsStub()); |
| 2085 __ CmpP(reg, Operand::Zero()); | 2085 __ CmpP(reg, Operand::Zero()); |
| 2086 EmitBranch(instr, ne); | 2086 EmitBranch(instr, ne); |
| 2087 } else if (type.IsJSArray()) { | 2087 } else if (type.IsJSArray()) { |
| 2088 DCHECK(!info()->IsStub()); | 2088 DCHECK(!info()->IsStub()); |
| 2089 EmitBranch(instr, al); | 2089 EmitBranch(instr, al); |
| 2090 } else if (type.IsHeapNumber()) { | 2090 } else if (type.IsHeapNumber()) { |
| 2091 DCHECK(!info()->IsStub()); | 2091 DCHECK(!info()->IsStub()); |
| 2092 __ ld(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset)); | 2092 __ LoadDouble(dbl_scratch, |
| | 2093 FieldMemOperand(reg, HeapNumber::kValueOffset)); |
| 2093 // Test the double value. Zero and NaN are false. | 2094 // Test the double value. Zero and NaN are false. |
| 2094 __ lzdr(kDoubleRegZero); | 2095 __ lzdr(kDoubleRegZero); |
| 2095 __ cdbr(dbl_scratch, kDoubleRegZero); | 2096 __ cdbr(dbl_scratch, kDoubleRegZero); |
| 2096 Condition lt_gt = static_cast<Condition>(lt | gt); | 2097 Condition lt_gt = static_cast<Condition>(lt | gt); |
| 2097 EmitBranch(instr, lt_gt); | 2098 EmitBranch(instr, lt_gt); |
| 2098 } else if (type.IsString()) { | 2099 } else if (type.IsString()) { |
| 2099 DCHECK(!info()->IsStub()); | 2100 DCHECK(!info()->IsStub()); |
| 2100 __ LoadP(ip, FieldMemOperand(reg, String::kLengthOffset)); | 2101 __ LoadP(ip, FieldMemOperand(reg, String::kLengthOffset)); |
| 2101 __ CmpP(ip, Operand::Zero()); | 2102 __ CmpP(ip, Operand::Zero()); |
| 2102 EmitBranch(instr, ne); | 2103 EmitBranch(instr, ne); |
| (...skipping 616 matching lines...) |
| 2719 if (access.IsExternalMemory()) { | 2720 if (access.IsExternalMemory()) { |
| 2720 Register result = ToRegister(instr->result()); | 2721 Register result = ToRegister(instr->result()); |
| 2721 MemOperand operand = MemOperand(object, offset); | 2722 MemOperand operand = MemOperand(object, offset); |
| 2722 __ LoadRepresentation(result, operand, access.representation(), r0); | 2723 __ LoadRepresentation(result, operand, access.representation(), r0); |
| 2723 return; | 2724 return; |
| 2724 } | 2725 } |
| 2725 | 2726 |
| 2726 if (instr->hydrogen()->representation().IsDouble()) { | 2727 if (instr->hydrogen()->representation().IsDouble()) { |
| 2727 DCHECK(access.IsInobject()); | 2728 DCHECK(access.IsInobject()); |
| 2728 DoubleRegister result = ToDoubleRegister(instr->result()); | 2729 DoubleRegister result = ToDoubleRegister(instr->result()); |
| 2729 __ ld(result, FieldMemOperand(object, offset)); | 2730 __ LoadDouble(result, FieldMemOperand(object, offset)); |
| 2730 return; | 2731 return; |
| 2731 } | 2732 } |
| 2732 | 2733 |
| 2733 Register result = ToRegister(instr->result()); | 2734 Register result = ToRegister(instr->result()); |
| 2734 if (!access.IsInobject()) { | 2735 if (!access.IsInobject()) { |
| 2735 __ LoadP(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); | 2736 __ LoadP(result, FieldMemOperand(object, JSObject::kPropertiesOffset)); |
| 2736 object = result; | 2737 object = result; |
| 2737 } | 2738 } |
| 2738 | 2739 |
| 2739 Representation representation = access.representation(); | 2740 Representation representation = access.representation(); |
| (...skipping 129 matching lines...) |
| 2869 use_scratch = true; | 2870 use_scratch = true; |
| 2870 } | 2871 } |
| 2871 if (elements_kind == FLOAT32_ELEMENTS) { | 2872 if (elements_kind == FLOAT32_ELEMENTS) { |
| 2872 if (!use_scratch) { | 2873 if (!use_scratch) { |
| 2873 __ ldeb(result, MemOperand(external_pointer, base_offset)); | 2874 __ ldeb(result, MemOperand(external_pointer, base_offset)); |
| 2874 } else { | 2875 } else { |
| 2875 __ ldeb(result, MemOperand(scratch0(), external_pointer, base_offset)); | 2876 __ ldeb(result, MemOperand(scratch0(), external_pointer, base_offset)); |
| 2876 } | 2877 } |
| 2877 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS | 2878 } else { // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS |
| 2878 if (!use_scratch) { | 2879 if (!use_scratch) { |
| 2879 __ ld(result, MemOperand(external_pointer, base_offset)); | 2880 __ LoadDouble(result, MemOperand(external_pointer, base_offset)); |
| 2880 } else { | 2881 } else { |
| 2881 __ ld(result, MemOperand(scratch0(), external_pointer, base_offset)); | 2882 __ LoadDouble(result, |
| | 2883 MemOperand(scratch0(), external_pointer, base_offset)); |
| 2882 } | 2884 } |
| 2883 } | 2885 } |
| 2884 } else { | 2886 } else { |
| 2885 Register result = ToRegister(instr->result()); | 2887 Register result = ToRegister(instr->result()); |
| 2886 MemOperand mem_operand = | 2888 MemOperand mem_operand = |
| 2887 PrepareKeyedOperand(key, external_pointer, key_is_constant, key_is_smi, | 2889 PrepareKeyedOperand(key, external_pointer, key_is_constant, key_is_smi, |
| 2888 constant_key, element_size_shift, base_offset, | 2890 constant_key, element_size_shift, base_offset, |
| 2889 keyMaybeNegative); | 2891 keyMaybeNegative); |
| 2890 switch (elements_kind) { | 2892 switch (elements_kind) { |
| 2891 case INT8_ELEMENTS: | 2893 case INT8_ELEMENTS: |
| (...skipping 74 matching lines...) |
| 2966 use_scratch = true; | 2968 use_scratch = true; |
| 2967 if (key_is_constant) { | 2969 if (key_is_constant) { |
| 2968 __ mov(scratch, Operand(base_offset)); | 2970 __ mov(scratch, Operand(base_offset)); |
| 2969 } else { | 2971 } else { |
| 2970 __ AddP(scratch, Operand(base_offset)); | 2972 __ AddP(scratch, Operand(base_offset)); |
| 2971 } | 2973 } |
| 2972 base_offset = 0; | 2974 base_offset = 0; |
| 2973 } | 2975 } |
| 2974 | 2976 |
| 2975 if (!use_scratch) { | 2977 if (!use_scratch) { |
| 2976 __ ld(result, MemOperand(elements, base_offset)); | 2978 __ LoadDouble(result, MemOperand(elements, base_offset)); |
| 2977 } else { | 2979 } else { |
| 2978 __ ld(result, MemOperand(scratch, elements, base_offset)); | 2980 __ LoadDouble(result, MemOperand(scratch, elements, base_offset)); |
| 2979 } | 2981 } |
| 2980 | 2982 |
| 2981 if (instr->hydrogen()->RequiresHoleCheck()) { | 2983 if (instr->hydrogen()->RequiresHoleCheck()) { |
| 2982 if (!use_scratch) { | 2984 if (!use_scratch) { |
| 2983 __ LoadlW(r0, | 2985 __ LoadlW(r0, |
| 2984 MemOperand(elements, base_offset + Register::kExponentOffset)); | 2986 MemOperand(elements, base_offset + Register::kExponentOffset)); |
| 2985 } else { | 2987 } else { |
| 2986 __ LoadlW(r0, MemOperand(scratch, elements, | 2988 __ LoadlW(r0, MemOperand(scratch, elements, |
| 2987 base_offset + Register::kExponentOffset)); | 2989 base_offset + Register::kExponentOffset)); |
| 2988 } | 2990 } |
| (...skipping 910 matching lines...) |
| 3899 #else | 3901 #else |
| 3900 DCHECK(!representation.IsSmi() || !instr->value()->IsConstantOperand() || | 3902 DCHECK(!representation.IsSmi() || !instr->value()->IsConstantOperand() || |
| 3901 IsSmi(LConstantOperand::cast(instr->value()))); | 3903 IsSmi(LConstantOperand::cast(instr->value()))); |
| 3902 #endif | 3904 #endif |
| 3903 if (!FLAG_unbox_double_fields && representation.IsDouble()) { | 3905 if (!FLAG_unbox_double_fields && representation.IsDouble()) { |
| 3904 DCHECK(access.IsInobject()); | 3906 DCHECK(access.IsInobject()); |
| 3905 DCHECK(!hinstr->has_transition()); | 3907 DCHECK(!hinstr->has_transition()); |
| 3906 DCHECK(!hinstr->NeedsWriteBarrier()); | 3908 DCHECK(!hinstr->NeedsWriteBarrier()); |
| 3907 DoubleRegister value = ToDoubleRegister(instr->value()); | 3909 DoubleRegister value = ToDoubleRegister(instr->value()); |
| 3908 DCHECK(offset >= 0); | 3910 DCHECK(offset >= 0); |
| 3909 __ std(value, FieldMemOperand(object, offset)); | 3911 __ StoreDouble(value, FieldMemOperand(object, offset)); |
| 3910 return; | 3912 return; |
| 3911 } | 3913 } |
| 3912 | 3914 |
| 3913 if (hinstr->has_transition()) { | 3915 if (hinstr->has_transition()) { |
| 3914 Handle<Map> transition = hinstr->transition_map(); | 3916 Handle<Map> transition = hinstr->transition_map(); |
| 3915 AddDeprecationDependency(transition); | 3917 AddDeprecationDependency(transition); |
| 3916 __ mov(scratch, Operand(transition)); | 3918 __ mov(scratch, Operand(transition)); |
| 3917 __ StoreP(scratch, FieldMemOperand(object, HeapObject::kMapOffset), r0); | 3919 __ StoreP(scratch, FieldMemOperand(object, HeapObject::kMapOffset), r0); |
| 3918 if (hinstr->NeedsWriteBarrierForMap()) { | 3920 if (hinstr->NeedsWriteBarrierForMap()) { |
| 3919 Register temp = ToRegister(instr->temp()); | 3921 Register temp = ToRegister(instr->temp()); |
| 3920 // Update the write barrier for the map field. | 3922 // Update the write barrier for the map field. |
| 3921 __ RecordWriteForMap(object, scratch, temp, GetLinkRegisterState(), | 3923 __ RecordWriteForMap(object, scratch, temp, GetLinkRegisterState(), |
| 3922 kSaveFPRegs); | 3924 kSaveFPRegs); |
| 3923 } | 3925 } |
| 3924 } | 3926 } |
| 3925 | 3927 |
| 3926 // Do the store. | 3928 // Do the store. |
| 3927 Register record_dest = object; | 3929 Register record_dest = object; |
| 3928 Register record_value = no_reg; | 3930 Register record_value = no_reg; |
| 3929 Register record_scratch = scratch; | 3931 Register record_scratch = scratch; |
| 3930 #if V8_TARGET_ARCH_S390X | 3932 #if V8_TARGET_ARCH_S390X |
| 3931 if (FLAG_unbox_double_fields && representation.IsDouble()) { | 3933 if (FLAG_unbox_double_fields && representation.IsDouble()) { |
| 3932 DCHECK(access.IsInobject()); | 3934 DCHECK(access.IsInobject()); |
| 3933 DoubleRegister value = ToDoubleRegister(instr->value()); | 3935 DoubleRegister value = ToDoubleRegister(instr->value()); |
| 3934 __ std(value, FieldMemOperand(object, offset)); | 3936 __ StoreDouble(value, FieldMemOperand(object, offset)); |
| 3935 if (hinstr->NeedsWriteBarrier()) { | 3937 if (hinstr->NeedsWriteBarrier()) { |
| 3936 record_value = ToRegister(instr->value()); | 3938 record_value = ToRegister(instr->value()); |
| 3937 } | 3939 } |
| 3938 } else { | 3940 } else { |
| 3939 if (representation.IsSmi() && | 3941 if (representation.IsSmi() && |
| 3940 hinstr->value()->representation().IsInteger32()) { | 3942 hinstr->value()->representation().IsInteger32()) { |
| 3941 DCHECK(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); | 3943 DCHECK(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY); |
| 3942 // 64-bit Smi optimization | 3944 // 64-bit Smi optimization |
| 3943 // Store int value directly to upper half of the smi. | 3945 // Store int value directly to upper half of the smi. |
| 3944 offset = SmiWordOffset(offset); | 3946 offset = SmiWordOffset(offset); |
| (...skipping 208 matching lines...) |
| 4153 __ AddP(scratch, Operand(address_offset)); | 4155 __ AddP(scratch, Operand(address_offset)); |
| 4154 address_offset = 0; | 4156 address_offset = 0; |
| 4155 } | 4157 } |
| 4156 } | 4158 } |
| 4157 | 4159 |
| 4158 if (instr->NeedsCanonicalization()) { | 4160 if (instr->NeedsCanonicalization()) { |
| 4159 // Turn potential sNaN value into qNaN. | 4161 // Turn potential sNaN value into qNaN. |
| 4160 __ CanonicalizeNaN(double_scratch, value); | 4162 __ CanonicalizeNaN(double_scratch, value); |
| 4161 DCHECK(address_offset >= 0); | 4163 DCHECK(address_offset >= 0); |
| 4162 if (use_scratch) | 4164 if (use_scratch) |
| 4163 __ std(double_scratch, MemOperand(scratch, elements, address_offset)); | 4165 __ StoreDouble(double_scratch, |
| | 4166 MemOperand(scratch, elements, address_offset)); |
| 4164 else | 4167 else |
| 4165 __ std(double_scratch, MemOperand(elements, address_offset)); | 4168 __ StoreDouble(double_scratch, MemOperand(elements, address_offset)); |
| 4166 } else { | 4169 } else { |
| 4167 if (use_scratch) | 4170 if (use_scratch) |
| 4168 __ std(value, MemOperand(scratch, elements, address_offset)); | 4171 __ StoreDouble(value, MemOperand(scratch, elements, address_offset)); |
| 4169 else | 4172 else |
| 4170 __ std(value, MemOperand(elements, address_offset)); | 4173 __ StoreDouble(value, MemOperand(elements, address_offset)); |
| 4171 } | 4174 } |
| 4172 } | 4175 } |
| 4173 | 4176 |
| 4174 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { | 4177 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { |
| 4175 HStoreKeyed* hinstr = instr->hydrogen(); | 4178 HStoreKeyed* hinstr = instr->hydrogen(); |
| 4176 Register value = ToRegister(instr->value()); | 4179 Register value = ToRegister(instr->value()); |
| 4177 Register elements = ToRegister(instr->elements()); | 4180 Register elements = ToRegister(instr->elements()); |
| 4178 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; | 4181 Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg; |
| 4179 Register scratch = scratch0(); | 4182 Register scratch = scratch0(); |
| 4180 int offset = instr->base_offset(); | 4183 int offset = instr->base_offset(); |
| (...skipping 560 matching lines...) |
| 4741 // Heap number map check. | 4744 // Heap number map check. |
| 4742 __ LoadP(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); | 4745 __ LoadP(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset)); |
| 4743 __ CmpP(scratch, RootMemOperand(Heap::kHeapNumberMapRootIndex)); | 4746 __ CmpP(scratch, RootMemOperand(Heap::kHeapNumberMapRootIndex)); |
| 4744 | 4747 |
| 4745 if (can_convert_undefined_to_nan) { | 4748 if (can_convert_undefined_to_nan) { |
| 4746 __ bne(&convert, Label::kNear); | 4749 __ bne(&convert, Label::kNear); |
| 4747 } else { | 4750 } else { |
| 4748 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber); | 4751 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber); |
| 4749 } | 4752 } |
| 4750 // load heap number | 4753 // load heap number |
| 4751 __ ld(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 4754 __ LoadDouble(result_reg, |
| | 4755 FieldMemOperand(input_reg, HeapNumber::kValueOffset)); |
| 4752 if (deoptimize_on_minus_zero) { | 4756 if (deoptimize_on_minus_zero) { |
| 4753 __ TestDoubleIsMinusZero(result_reg, scratch, ip); | 4757 __ TestDoubleIsMinusZero(result_reg, scratch, ip); |
| 4754 DeoptimizeIf(eq, instr, DeoptimizeReason::kMinusZero); | 4758 DeoptimizeIf(eq, instr, DeoptimizeReason::kMinusZero); |
| 4755 } | 4759 } |
| 4756 __ b(&done, Label::kNear); | 4760 __ b(&done, Label::kNear); |
| 4757 if (can_convert_undefined_to_nan) { | 4761 if (can_convert_undefined_to_nan) { |
| 4758 __ bind(&convert); | 4762 __ bind(&convert); |
| 4759 // Convert undefined (and hole) to NaN. | 4763 // Convert undefined (and hole) to NaN. |
| 4760 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex); | 4764 __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex); |
| 4761 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined); | 4765 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined); |
| 4762 __ LoadRoot(scratch, Heap::kNanValueRootIndex); | 4766 __ LoadRoot(scratch, Heap::kNanValueRootIndex); |
| 4763 __ ld(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset)); | 4767 __ LoadDouble(result_reg, |
| | 4768 FieldMemOperand(scratch, HeapNumber::kValueOffset)); |
| 4764 __ b(&done, Label::kNear); | 4769 __ b(&done, Label::kNear); |
| 4765 } | 4770 } |
| 4766 } else { | 4771 } else { |
| 4767 __ SmiUntag(scratch, input_reg); | 4772 __ SmiUntag(scratch, input_reg); |
| 4768 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); | 4773 DCHECK(mode == NUMBER_CANDIDATE_IS_SMI); |
| 4769 } | 4774 } |
| 4770 // Smi to double register conversion | 4775 // Smi to double register conversion |
| 4771 __ bind(&load_smi); | 4776 __ bind(&load_smi); |
| 4772 // scratch: untagged value of input_reg | 4777 // scratch: untagged value of input_reg |
| 4773 __ ConvertIntToDouble(scratch, result_reg); | 4778 __ ConvertIntToDouble(scratch, result_reg); |
| (...skipping 40 matching lines...) |
| 4814 __ b(&done, Label::kNear); | 4819 __ b(&done, Label::kNear); |
| 4815 | 4820 |
| 4816 __ bind(&check_false); | 4821 __ bind(&check_false); |
| 4817 __ CompareRoot(input_reg, Heap::kFalseValueRootIndex); | 4822 __ CompareRoot(input_reg, Heap::kFalseValueRootIndex); |
| 4818 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefinedBoolean); | 4823 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefinedBoolean); |
| 4819 __ LoadImmP(input_reg, Operand::Zero()); | 4824 __ LoadImmP(input_reg, Operand::Zero()); |
| 4820 } else { | 4825 } else { |
| 4821 // Deoptimize if we don't have a heap number. | 4826 // Deoptimize if we don't have a heap number. |
| 4822 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber); | 4827 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber); |
| 4823 | 4828 |
| 4824 __ ld(double_scratch2, | 4829 __ LoadDouble(double_scratch2, |
| 4825 FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 4830 FieldMemOperand(input_reg, HeapNumber::kValueOffset)); |
| 4826 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 4831 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 4827 // preserve heap number pointer in scratch2 for minus zero check below | 4832 // preserve heap number pointer in scratch2 for minus zero check below |
| 4828 __ LoadRR(scratch2, input_reg); | 4833 __ LoadRR(scratch2, input_reg); |
| 4829 } | 4834 } |
| 4830 __ TryDoubleToInt32Exact(input_reg, double_scratch2, scratch1, | 4835 __ TryDoubleToInt32Exact(input_reg, double_scratch2, scratch1, |
| 4831 double_scratch); | 4836 double_scratch); |
| 4832 DeoptimizeIf(ne, instr, DeoptimizeReason::kLostPrecisionOrNaN); | 4837 DeoptimizeIf(ne, instr, DeoptimizeReason::kLostPrecisionOrNaN); |
| 4833 | 4838 |
| 4834 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { | 4839 if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) { |
| 4835 __ CmpP(input_reg, Operand::Zero()); | 4840 __ CmpP(input_reg, Operand::Zero()); |
| (...skipping 293 matching lines...) |
| 5129 | 5134 |
| 5130 // Check for undefined. Undefined is converted to zero for clamping | 5135 // Check for undefined. Undefined is converted to zero for clamping |
| 5131 // conversions. | 5136 // conversions. |
| 5132 __ CmpP(input_reg, Operand(factory()->undefined_value())); | 5137 __ CmpP(input_reg, Operand(factory()->undefined_value())); |
| 5133 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined); | 5138 DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined); |
| 5134 __ LoadImmP(result_reg, Operand::Zero()); | 5139 __ LoadImmP(result_reg, Operand::Zero()); |
| 5135 __ b(&done, Label::kNear); | 5140 __ b(&done, Label::kNear); |
| 5136 | 5141 |
| 5137 // Heap number | 5142 // Heap number |
| 5138 __ bind(&heap_number); | 5143 __ bind(&heap_number); |
| 5139 __ ld(temp_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); | 5144 __ LoadDouble(temp_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset)); |
| 5140 __ ClampDoubleToUint8(result_reg, temp_reg, double_scratch0()); | 5145 __ ClampDoubleToUint8(result_reg, temp_reg, double_scratch0()); |
| 5141 __ b(&done, Label::kNear); | 5146 __ b(&done, Label::kNear); |
| 5142 | 5147 |
| 5143 // smi | 5148 // smi |
| 5144 __ bind(&is_smi); | 5149 __ bind(&is_smi); |
| 5145 __ ClampUint8(result_reg, result_reg); | 5150 __ ClampUint8(result_reg, result_reg); |
| 5146 | 5151 |
| 5147 __ bind(&done); | 5152 __ bind(&done); |
| 5148 } | 5153 } |
| 5149 | 5154 |
| (...skipping 480 matching lines...) |
| 5630 __ LoadP(result, | 5635 __ LoadP(result, |
| 5631 FieldMemOperand(scratch, FixedArray::kHeaderSize - kPointerSize)); | 5636 FieldMemOperand(scratch, FixedArray::kHeaderSize - kPointerSize)); |
| 5632 __ bind(deferred->exit()); | 5637 __ bind(deferred->exit()); |
| 5633 __ bind(&done); | 5638 __ bind(&done); |
| 5634 } | 5639 } |
| 5635 | 5640 |
| 5636 #undef __ | 5641 #undef __ |
| 5637 | 5642 |
| 5638 } // namespace internal | 5643 } // namespace internal |
| 5639 } // namespace v8 | 5644 } // namespace v8 |
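
Note: the patch above consistently replaces the raw z/Architecture `ld`/`std` double load/store instructions with the `LoadDouble`/`StoreDouble` macro-assembler helpers. A plausible motivation (an assumption, not stated in this diff) is that the raw RX-form instructions only encode a 12-bit unsigned displacement, while a helper can fall back to the long-displacement RXY forms (`ldy`/`stdy`) when the offset is larger. The standalone C++ sketch below models that dispatch; `LoadDoubleSketch` and the emitters are hypothetical stand-ins, not the actual V8 `MacroAssembler` implementation.

```cpp
// Standalone sketch (not V8 source): models the dispatch a
// LoadDouble/StoreDouble-style helper would perform instead of always
// emitting the raw RX-form ld/std. Assumption: the helper exists so that
// offsets outside the 12-bit unsigned displacement of ld/std can still be
// encoded, using the long-displacement RXY forms (ldy/stdy).
#include <cstdint>
#include <cstdio>

static bool is_uint12(int64_t value) { return value >= 0 && value < (1 << 12); }
static bool is_int20(int64_t value) {
  return value >= -(1 << 19) && value < (1 << 19);
}

// Hypothetical emitters standing in for the assembler's ld/ldy on s390.
static void emit_ld(int64_t offset) {
  std::printf("ld   ...,%lld(...)\n", (long long)offset);
}
static void emit_ldy(int64_t offset) {
  std::printf("ldy  ...,%lld(...)\n", (long long)offset);
}

// What a LoadDouble-like wrapper plausibly does with the displacement of its
// MemOperand (reduced here to a plain offset).
static void LoadDoubleSketch(int64_t offset) {
  if (is_uint12(offset)) {
    emit_ld(offset);   // short RX form fits
  } else if (is_int20(offset)) {
    emit_ldy(offset);  // long-displacement RXY form
  } else {
    std::printf("materialize %lld in a scratch register first\n",
                (long long)offset);
  }
}

int main() {
  LoadDoubleSketch(8);      // small field offsets fit ld
  LoadDoubleSketch(40000);  // too large for ld, still fits ldy
  return 0;
}
```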