Chromium Code Reviews

Diff: src/crankshaft/s390/lithium-codegen-s390.cc

Issue 2492913002: s390: fix overflowing offset in std and ld (Closed)
Patch Set: Created 4 years, 1 month ago
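Background for this change: on z/Architecture the RX-format STD and LD instructions encode only a 12-bit unsigned displacement (0 through 4095), so a larger spill-slot or field offset overflows the displacement field, while the RXY-format STDY/LDY variants take a 20-bit signed displacement. The patch therefore routes double loads and stores through the MacroAssembler's LoadDouble/StoreDouble helpers instead of the raw std/ld emitters, leaving the helpers free to pick an encoding wide enough for the offset. The snippet below is only a standalone sketch of that range-check idea, not V8 source; EmitStd, EmitStdy, IsUint12, IsInt20, StoreDoubleSketch and kDoubleSize are illustrative stand-ins.

// Standalone illustration (not V8 source): why an offset that fits in an int
// can still overflow the 12-bit displacement of STD/LD, and how a wrapper can
// select a wider-displacement form instead of emitting a raw std/ld.
#include <cstdint>
#include <cstdio>
#include <initializer_list>

namespace sketch {

constexpr intptr_t kDoubleSize = 8;  // bytes per double

// Displacement ranges of the two z/Architecture instruction formats.
bool IsUint12(intptr_t x) { return x >= 0 && x <= 0xFFF; }             // STD/LD (RX)
bool IsInt20(intptr_t x) { return x >= -(1 << 19) && x < (1 << 19); }  // STDY/LDY (RXY)

// Stand-ins for the assembler's raw emitters; they only print here.
void EmitStd(int freg, int base, intptr_t disp) {
  std::printf("  std  f%d, %ld(r%d)   ; 12-bit unsigned displacement\n", freg,
              static_cast<long>(disp), base);
}
void EmitStdy(int freg, int base, intptr_t disp) {
  std::printf("  stdy f%d, %ld(r%d)   ; 20-bit signed displacement\n", freg,
              static_cast<long>(disp), base);
}

// The idea behind a StoreDouble-style wrapper: pick an encoding that can hold
// the displacement instead of letting it overflow.
void StoreDoubleSketch(int freg, int base, intptr_t disp) {
  if (IsUint12(disp)) {
    EmitStd(freg, base, disp);
  } else if (IsInt20(disp)) {
    EmitStdy(freg, base, disp);
  } else {
    // A real helper would materialize the offset in a scratch register first.
    std::printf("  ; offset %ld needs a scratch register\n",
                static_cast<long>(disp));
  }
}

}  // namespace sketch

int main() {
  // A large spill slot or a far in-object field offset easily exceeds 4095,
  // which is the overflow this CL's title describes.
  for (intptr_t offset :
       {intptr_t{8}, 512 * sketch::kDoubleSize, intptr_t{600000}}) {
    sketch::StoreDoubleSketch(/*freg=*/0, /*base=*/15, offset);
  }
  return 0;
}

The wrapper signature above (plain register numbers instead of DoubleRegister/MemOperand) is deliberately simplified; only the displacement-range decision is the point.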
 // Copyright 2014 the V8 project authors. All rights reserved.
 //
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/crankshaft/s390/lithium-codegen-s390.h"

 #include "src/base/bits.h"
 #include "src/code-factory.h"
 #include "src/code-stubs.h"
(...skipping 48 matching lines...)
 }

 void LCodeGen::SaveCallerDoubles() {
   DCHECK(info()->saves_caller_doubles());
   DCHECK(NeedsEagerFrame());
   Comment(";;; Save clobbered callee double registers");
   int count = 0;
   BitVector* doubles = chunk()->allocated_double_registers();
   BitVector::Iterator save_iterator(doubles);
   while (!save_iterator.Done()) {
-    __ std(DoubleRegister::from_code(save_iterator.Current()),
-           MemOperand(sp, count * kDoubleSize));
+    __ StoreDouble(DoubleRegister::from_code(save_iterator.Current()),
+                   MemOperand(sp, count * kDoubleSize));
     save_iterator.Advance();
     count++;
   }
 }

 void LCodeGen::RestoreCallerDoubles() {
   DCHECK(info()->saves_caller_doubles());
   DCHECK(NeedsEagerFrame());
   Comment(";;; Restore clobbered callee double registers");
   BitVector* doubles = chunk()->allocated_double_registers();
   BitVector::Iterator save_iterator(doubles);
   int count = 0;
   while (!save_iterator.Done()) {
-    __ ld(DoubleRegister::from_code(save_iterator.Current()),
-          MemOperand(sp, count * kDoubleSize));
+    __ LoadDouble(DoubleRegister::from_code(save_iterator.Current()),
+                  MemOperand(sp, count * kDoubleSize));
     save_iterator.Advance();
     count++;
   }
 }

 bool LCodeGen::GeneratePrologue() {
   DCHECK(is_generating());

   if (info()->IsOptimizing()) {
     ProfileEntryHookStub::MaybeCallEntryHook(masm_);
(...skipping 1986 matching lines...)
       EmitBranch(instr, eq);
     } else if (type.IsSmi()) {
       DCHECK(!info()->IsStub());
       __ CmpP(reg, Operand::Zero());
       EmitBranch(instr, ne);
     } else if (type.IsJSArray()) {
       DCHECK(!info()->IsStub());
       EmitBranch(instr, al);
     } else if (type.IsHeapNumber()) {
       DCHECK(!info()->IsStub());
-      __ ld(dbl_scratch, FieldMemOperand(reg, HeapNumber::kValueOffset));
+      __ LoadDouble(dbl_scratch,
+                    FieldMemOperand(reg, HeapNumber::kValueOffset));
       // Test the double value. Zero and NaN are false.
       __ lzdr(kDoubleRegZero);
       __ cdbr(dbl_scratch, kDoubleRegZero);
       Condition lt_gt = static_cast<Condition>(lt | gt);
       EmitBranch(instr, lt_gt);
     } else if (type.IsString()) {
       DCHECK(!info()->IsStub());
       __ LoadP(ip, FieldMemOperand(reg, String::kLengthOffset));
       __ CmpP(ip, Operand::Zero());
       EmitBranch(instr, ne);
(...skipping 568 matching lines...)
   if (access.IsExternalMemory()) {
     Register result = ToRegister(instr->result());
     MemOperand operand = MemOperand(object, offset);
     __ LoadRepresentation(result, operand, access.representation(), r0);
     return;
   }

   if (instr->hydrogen()->representation().IsDouble()) {
     DCHECK(access.IsInobject());
     DoubleRegister result = ToDoubleRegister(instr->result());
-    __ ld(result, FieldMemOperand(object, offset));
+    __ LoadDouble(result, FieldMemOperand(object, offset));
     return;
   }

   Register result = ToRegister(instr->result());
   if (!access.IsInobject()) {
     __ LoadP(result, FieldMemOperand(object, JSObject::kPropertiesOffset));
     object = result;
   }

   Representation representation = access.representation();
(...skipping 117 matching lines...)
       use_scratch = true;
     }
     if (elements_kind == FLOAT32_ELEMENTS) {
       if (!use_scratch) {
         __ ldeb(result, MemOperand(external_pointer, base_offset));
       } else {
         __ ldeb(result, MemOperand(scratch0(), external_pointer, base_offset));
       }
     } else {  // i.e. elements_kind == EXTERNAL_DOUBLE_ELEMENTS
       if (!use_scratch) {
-        __ ld(result, MemOperand(external_pointer, base_offset));
+        __ LoadDouble(result, MemOperand(external_pointer, base_offset));
       } else {
-        __ ld(result, MemOperand(scratch0(), external_pointer, base_offset));
+        __ LoadDouble(result,
+                      MemOperand(scratch0(), external_pointer, base_offset));
       }
     }
   } else {
     Register result = ToRegister(instr->result());
     MemOperand mem_operand =
         PrepareKeyedOperand(key, external_pointer, key_is_constant, key_is_smi,
                             constant_key, element_size_shift, base_offset,
                             keyMaybeNegative);
     switch (elements_kind) {
       case INT8_ELEMENTS:
(...skipping 74 matching lines...)
     use_scratch = true;
     if (key_is_constant) {
       __ mov(scratch, Operand(base_offset));
     } else {
       __ AddP(scratch, Operand(base_offset));
     }
     base_offset = 0;
   }

   if (!use_scratch) {
-    __ ld(result, MemOperand(elements, base_offset));
+    __ LoadDouble(result, MemOperand(elements, base_offset));
   } else {
-    __ ld(result, MemOperand(scratch, elements, base_offset));
+    __ LoadDouble(result, MemOperand(scratch, elements, base_offset));
   }

   if (instr->hydrogen()->RequiresHoleCheck()) {
     if (!use_scratch) {
       __ LoadlW(r0,
                 MemOperand(elements, base_offset + Register::kExponentOffset));
     } else {
       __ LoadlW(r0, MemOperand(scratch, elements,
                                base_offset + Register::kExponentOffset));
     }
(...skipping 900 matching lines...)
 #else
   DCHECK(!representation.IsSmi() || !instr->value()->IsConstantOperand() ||
          IsSmi(LConstantOperand::cast(instr->value())));
 #endif
   if (!FLAG_unbox_double_fields && representation.IsDouble()) {
     DCHECK(access.IsInobject());
     DCHECK(!hinstr->has_transition());
     DCHECK(!hinstr->NeedsWriteBarrier());
     DoubleRegister value = ToDoubleRegister(instr->value());
     DCHECK(offset >= 0);
-    __ std(value, FieldMemOperand(object, offset));
+    __ StoreDouble(value, FieldMemOperand(object, offset));
     return;
   }

   if (hinstr->has_transition()) {
     Handle<Map> transition = hinstr->transition_map();
     AddDeprecationDependency(transition);
     __ mov(scratch, Operand(transition));
     __ StoreP(scratch, FieldMemOperand(object, HeapObject::kMapOffset), r0);
     if (hinstr->NeedsWriteBarrierForMap()) {
       Register temp = ToRegister(instr->temp());
       // Update the write barrier for the map field.
       __ RecordWriteForMap(object, scratch, temp, GetLinkRegisterState(),
                            kSaveFPRegs);
     }
   }

   // Do the store.
   Register record_dest = object;
   Register record_value = no_reg;
   Register record_scratch = scratch;
 #if V8_TARGET_ARCH_S390X
   if (FLAG_unbox_double_fields && representation.IsDouble()) {
     DCHECK(access.IsInobject());
     DoubleRegister value = ToDoubleRegister(instr->value());
-    __ std(value, FieldMemOperand(object, offset));
+    __ StoreDouble(value, FieldMemOperand(object, offset));
     if (hinstr->NeedsWriteBarrier()) {
       record_value = ToRegister(instr->value());
     }
   } else {
     if (representation.IsSmi() &&
         hinstr->value()->representation().IsInteger32()) {
       DCHECK(hinstr->store_mode() == STORE_TO_INITIALIZED_ENTRY);
       // 64-bit Smi optimization
       // Store int value directly to upper half of the smi.
       offset = SmiWordOffset(offset);
(...skipping 208 matching lines...)
       __ AddP(scratch, Operand(address_offset));
       address_offset = 0;
     }
   }

   if (instr->NeedsCanonicalization()) {
     // Turn potential sNaN value into qNaN.
     __ CanonicalizeNaN(double_scratch, value);
     DCHECK(address_offset >= 0);
     if (use_scratch)
-      __ std(double_scratch, MemOperand(scratch, elements, address_offset));
+      __ StoreDouble(double_scratch,
+                     MemOperand(scratch, elements, address_offset));
     else
-      __ std(double_scratch, MemOperand(elements, address_offset));
+      __ StoreDouble(double_scratch, MemOperand(elements, address_offset));
   } else {
     if (use_scratch)
-      __ std(value, MemOperand(scratch, elements, address_offset));
+      __ StoreDouble(value, MemOperand(scratch, elements, address_offset));
     else
-      __ std(value, MemOperand(elements, address_offset));
+      __ StoreDouble(value, MemOperand(elements, address_offset));
   }
 }

 void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) {
   HStoreKeyed* hinstr = instr->hydrogen();
   Register value = ToRegister(instr->value());
   Register elements = ToRegister(instr->elements());
   Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
   Register scratch = scratch0();
   int offset = instr->base_offset();
(...skipping 559 matching lines...)
     // Heap number map check.
     __ LoadP(scratch, FieldMemOperand(input_reg, HeapObject::kMapOffset));
     __ CmpP(scratch, RootMemOperand(Heap::kHeapNumberMapRootIndex));

     if (can_convert_undefined_to_nan) {
       __ bne(&convert, Label::kNear);
     } else {
       DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber);
     }
     // load heap number
-    __ ld(result_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
+    __ LoadDouble(result_reg,
+                  FieldMemOperand(input_reg, HeapNumber::kValueOffset));
     if (deoptimize_on_minus_zero) {
       __ TestDoubleIsMinusZero(result_reg, scratch, ip);
       DeoptimizeIf(eq, instr, DeoptimizeReason::kMinusZero);
     }
     __ b(&done, Label::kNear);
     if (can_convert_undefined_to_nan) {
       __ bind(&convert);
       // Convert undefined (and hole) to NaN.
       __ CompareRoot(input_reg, Heap::kUndefinedValueRootIndex);
       DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined);
       __ LoadRoot(scratch, Heap::kNanValueRootIndex);
-      __ ld(result_reg, FieldMemOperand(scratch, HeapNumber::kValueOffset));
+      __ LoadDouble(result_reg,
+                    FieldMemOperand(scratch, HeapNumber::kValueOffset));
       __ b(&done, Label::kNear);
     }
   } else {
     __ SmiUntag(scratch, input_reg);
     DCHECK(mode == NUMBER_CANDIDATE_IS_SMI);
   }
   // Smi to double register conversion
   __ bind(&load_smi);
   // scratch: untagged value of input_reg
   __ ConvertIntToDouble(scratch, result_reg);
(...skipping 21 matching lines...)
     __ beq(&truncate);
     __ CompareInstanceType(scratch1, scratch1, ODDBALL_TYPE);
     DeoptimizeIf(ne, instr, DeoptimizeReason::kNotANumberOrOddball);
     __ bind(&truncate);
     __ LoadRR(scratch2, input_reg);
     __ TruncateHeapNumberToI(input_reg, scratch2);
   } else {
     // Deoptimize if we don't have a heap number.
     DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumber);

-    __ ld(double_scratch2,
-          FieldMemOperand(input_reg, HeapNumber::kValueOffset));
+    __ LoadDouble(double_scratch2,
+                  FieldMemOperand(input_reg, HeapNumber::kValueOffset));
     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
       // preserve heap number pointer in scratch2 for minus zero check below
       __ LoadRR(scratch2, input_reg);
     }
     __ TryDoubleToInt32Exact(input_reg, double_scratch2, scratch1,
                              double_scratch);
     DeoptimizeIf(ne, instr, DeoptimizeReason::kLostPrecisionOrNaN);

     if (instr->hydrogen()->CheckFlag(HValue::kBailoutOnMinusZero)) {
       __ CmpP(input_reg, Operand::Zero());
(...skipping 293 matching lines...)

   // Check for undefined. Undefined is converted to zero for clamping
   // conversions.
   __ CmpP(input_reg, Operand(factory()->undefined_value()));
   DeoptimizeIf(ne, instr, DeoptimizeReason::kNotAHeapNumberUndefined);
   __ LoadImmP(result_reg, Operand::Zero());
   __ b(&done, Label::kNear);

   // Heap number
   __ bind(&heap_number);
-  __ ld(temp_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
+  __ LoadDouble(temp_reg, FieldMemOperand(input_reg, HeapNumber::kValueOffset));
   __ ClampDoubleToUint8(result_reg, temp_reg, double_scratch0());
   __ b(&done, Label::kNear);

   // smi
   __ bind(&is_smi);
   __ ClampUint8(result_reg, result_reg);

   __ bind(&done);
 }

(...skipping 480 matching lines...)
   __ LoadP(result,
            FieldMemOperand(scratch, FixedArray::kHeaderSize - kPointerSize));
   __ bind(deferred->exit());
   __ bind(&done);
 }

 #undef __

 }  // namespace internal
 }  // namespace v8
