OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 755 matching lines...)
766 } else { // PostIndex or NegPostIndex. | 766 } else { // PostIndex or NegPostIndex. |
767 ASSERT((dst.am() == PostIndex) || (dst.am() == NegPostIndex)); | 767 ASSERT((dst.am() == PostIndex) || (dst.am() == NegPostIndex)); |
768 dst2.set_offset(dst2.offset() - 4); | 768 dst2.set_offset(dst2.offset() - 4); |
769 str(src1, MemOperand(dst.rn(), 4, PostIndex), cond); | 769 str(src1, MemOperand(dst.rn(), 4, PostIndex), cond); |
770 str(src2, dst2, cond); | 770 str(src2, dst2, cond); |
771 } | 771 } |
772 } | 772 } |
773 } | 773 } |
774 | 774 |
775 | 775 |
| 776 void MacroAssembler::VFPEnsureFPSCRState(Register scratch) { |
| 777 // If needed, restore wanted bits of FPSCR. |
| 778 Label fpscr_done; |
| 779 vmrs(scratch); |
| 780 tst(scratch, Operand(kVFPDefaultNaNModeControlBit)); |
| 781 b(ne, &fpscr_done); |
| 782 orr(scratch, scratch, Operand(kVFPDefaultNaNModeControlBit)); |
| 783 vmsr(scratch); |
| 784 bind(&fpscr_done); |
| 785 } |
| 786 |
| 787 void MacroAssembler::VFPCanonicalizeNaN(const DwVfpRegister value, |
| 788 const Condition cond) { |
| 789 vsub(value, value, kDoubleRegZero, cond); |
| 790 } |
| 791 |
| 792 |
776 void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1, | 793 void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1, |
777 const DwVfpRegister src2, | 794 const DwVfpRegister src2, |
778 const Condition cond) { | 795 const Condition cond) { |
779 // Compare and move FPSCR flags to the normal condition flags. | 796 // Compare and move FPSCR flags to the normal condition flags. |
780 VFPCompareAndLoadFlags(src1, src2, pc, cond); | 797 VFPCompareAndLoadFlags(src1, src2, pc, cond); |
781 } | 798 } |
782 | 799 |
783 void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1, | 800 void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1, |
784 const double src2, | 801 const double src2, |
785 const Condition cond) { | 802 const Condition cond) { |
(...skipping 1190 matching lines...)
1976 | 1993 |
1977 void MacroAssembler::StoreNumberToDoubleElements(Register value_reg, | 1994 void MacroAssembler::StoreNumberToDoubleElements(Register value_reg, |
1978 Register key_reg, | 1995 Register key_reg, |
1979 Register elements_reg, | 1996 Register elements_reg, |
1980 Register scratch1, | 1997 Register scratch1, |
1981 Register scratch2, | 1998 Register scratch2, |
1982 Register scratch3, | 1999 Register scratch3, |
1983 Register scratch4, | 2000 Register scratch4, |
1984 Label* fail, | 2001 Label* fail, |
1985 int elements_offset) { | 2002 int elements_offset) { |
1986 Label smi_value, maybe_nan, have_double_value, is_nan, done; | 2003 Label smi_value, store; |
1987 Register mantissa_reg = scratch2; | 2004 Register mantissa_reg = scratch2; |
1988 Register exponent_reg = scratch3; | 2005 Register exponent_reg = scratch3; |
1989 | 2006 |
1990 // Handle smi values specially. | 2007 // Handle smi values specially. |
1991 JumpIfSmi(value_reg, &smi_value); | 2008 JumpIfSmi(value_reg, &smi_value); |
1992 | 2009 |
1993 // Ensure that the object is a heap number | 2010 // Ensure that the object is a heap number |
1994 CheckMap(value_reg, | 2011 CheckMap(value_reg, |
1995 scratch1, | 2012 scratch1, |
1996 isolate()->factory()->heap_number_map(), | 2013 isolate()->factory()->heap_number_map(), |
1997 fail, | 2014 fail, |
1998 DONT_DO_SMI_CHECK); | 2015 DONT_DO_SMI_CHECK); |
1999 | 2016 |
2000 // Check for nan: all NaN values have a value greater (signed) than 0x7ff00000 | 2017 vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset)); |
2001 // in the exponent. | 2018 // Force a canonical NaN. |
2002 mov(scratch1, Operand(kNaNOrInfinityLowerBoundUpper32)); | 2019 if (emit_debug_code()) { |
2003 ldr(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset)); | 2020 vmrs(ip); |
2004 cmp(exponent_reg, scratch1); | 2021 tst(ip, Operand(kVFPDefaultNaNModeControlBit)); |
2005 b(ge, &maybe_nan); | 2022 Assert(ne, "Default NaN mode not set"); |
| 2023 } |
| 2024 VFPCanonicalizeNaN(d0); |
| 2025 b(&store); |
2006 | 2026 |
2007 ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset)); | 2027 bind(&smi_value); |
| 2028 Register untagged_value = scratch1; |
| 2029 SmiUntag(untagged_value, value_reg); |
| 2030 FloatingPointHelper::ConvertIntToDouble( |
| 2031 this, untagged_value, FloatingPointHelper::kVFPRegisters, d0, |
| 2032 mantissa_reg, exponent_reg, scratch4, s2); |
2008 | 2033 |
2009 bind(&have_double_value); | 2034 bind(&store); |
2010 add(scratch1, elements_reg, | 2035 add(scratch1, elements_reg, |
2011 Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize)); | 2036 Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize)); |
2012 str(mantissa_reg, FieldMemOperand( | 2037 vstr(d0, FieldMemOperand(scratch1, |
2013 scratch1, FixedDoubleArray::kHeaderSize - elements_offset)); | 2038 FixedDoubleArray::kHeaderSize - elements_offset)); |
2014 uint32_t offset = FixedDoubleArray::kHeaderSize - elements_offset + | |
2015 sizeof(kHoleNanLower32); | |
2016 str(exponent_reg, FieldMemOperand(scratch1, offset)); | |
2017 jmp(&done); | |
2018 | |
2019 bind(&maybe_nan); | |
2020 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise | |
2021 // it's an Infinity, and the non-NaN code path applies. | |
2022 b(gt, &is_nan); | |
2023 ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset)); | |
2024 cmp(mantissa_reg, Operand::Zero()); | |
2025 b(eq, &have_double_value); | |
2026 bind(&is_nan); | |
2027 // Load canonical NaN for storing into the double array. | |
2028 uint64_t nan_int64 = BitCast<uint64_t>( | |
2029 FixedDoubleArray::canonical_not_the_hole_nan_as_double()); | |
2030 mov(mantissa_reg, Operand(static_cast<uint32_t>(nan_int64))); | |
2031 mov(exponent_reg, Operand(static_cast<uint32_t>(nan_int64 >> 32))); | |
2032 jmp(&have_double_value); | |
2033 | |
2034 bind(&smi_value); | |
2035 add(scratch1, elements_reg, | |
2036 Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag - | |
2037 elements_offset)); | |
2038 add(scratch1, scratch1, | |
2039 Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize)); | |
2040 // scratch1 is now effective address of the double element | |
2041 | |
2042 FloatingPointHelper::Destination destination; | |
2043 destination = FloatingPointHelper::kVFPRegisters; | |
2044 | |
2045 Register untagged_value = elements_reg; | |
2046 SmiUntag(untagged_value, value_reg); | |
2047 FloatingPointHelper::ConvertIntToDouble(this, | |
2048 untagged_value, | |
2049 destination, | |
2050 d0, | |
2051 mantissa_reg, | |
2052 exponent_reg, | |
2053 scratch4, | |
2054 s2); | |
2055 if (destination == FloatingPointHelper::kVFPRegisters) { | |
2056 vstr(d0, scratch1, 0); | |
2057 } else { | |
2058 str(mantissa_reg, MemOperand(scratch1, 0)); | |
2059 str(exponent_reg, MemOperand(scratch1, Register::kSizeInBytes)); | |
2060 } | |
2061 bind(&done); | |
2062 } | 2039 } |
2063 | 2040 |
2064 | 2041 |
2065 void MacroAssembler::CompareMap(Register obj, | 2042 void MacroAssembler::CompareMap(Register obj, |
2066 Register scratch, | 2043 Register scratch, |
2067 Handle<Map> map, | 2044 Handle<Map> map, |
2068 Label* early_success, | 2045 Label* early_success, |
2069 CompareMapMode mode) { | 2046 CompareMapMode mode) { |
2070 ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); | 2047 ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset)); |
2071 CompareMap(scratch, map, early_success, mode); | 2048 CompareMap(scratch, map, early_success, mode); |
(...skipping 1787 matching lines...)
3859 void CodePatcher::EmitCondition(Condition cond) { | 3836 void CodePatcher::EmitCondition(Condition cond) { |
3860 Instr instr = Assembler::instr_at(masm_.pc_); | 3837 Instr instr = Assembler::instr_at(masm_.pc_); |
3861 instr = (instr & ~kCondMask) | cond; | 3838 instr = (instr & ~kCondMask) | cond; |
3862 masm_.emit(instr); | 3839 masm_.emit(instr); |
3863 } | 3840 } |
3864 | 3841 |
3865 | 3842 |
3866 } } // namespace v8::internal | 3843 } } // namespace v8::internal |
3867 | 3844 |
3868 #endif // V8_TARGET_ARCH_ARM | 3845 #endif // V8_TARGET_ARCH_ARM |
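
The new store path relies on hardware NaN canonicalization: once VFPEnsureFPSCRState has set the FPSCR DefaultNaN control bit, VFPCanonicalizeNaN's vsub(value, value, kDoubleRegZero) maps any NaN input to the single default quiet NaN, so doubles written into a FixedDoubleArray can never alias the hole sentinel bit pattern. Below is a minimal host-side sketch of that invariant, assuming ARM's default quiet-NaN encoding for doubles; the kCanonicalNaNBits constant, CanonicalizeNaN helper, and the main() driver are illustrative names, not part of V8.

#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>

// Illustrative stand-in for ARM's default quiet NaN for doubles
// (sign 0, exponent all ones, only the top fraction bit set) -- the
// value that DN-mode VFP arithmetic produces for any NaN operand.
const uint64_t kCanonicalNaNBits = 0x7FF8000000000000ULL;

// Host-side equivalent of what DN-mode vsub(value, value, 0.0) guarantees:
// every NaN collapses to the one canonical bit pattern, all other values
// pass through unchanged.
double CanonicalizeNaN(double value) {
  if (std::isnan(value)) {
    double canonical;
    std::memcpy(&canonical, &kCanonicalNaNBits, sizeof(canonical));
    return canonical;
  }
  return value;
}

int main() {
  // Build a NaN with an arbitrary payload, as user code could.
  uint64_t odd_nan_bits = 0x7FF0000000001234ULL;
  double odd_nan;
  std::memcpy(&odd_nan, &odd_nan_bits, sizeof(odd_nan));

  double stored = CanonicalizeNaN(odd_nan);
  uint64_t stored_bits;
  std::memcpy(&stored_bits, &stored, sizeof(stored_bits));

  // Every NaN stored into the double array ends up with the same bits,
  // so it cannot be confused with the hole sentinel.
  assert(stored_bits == kCanonicalNaNBits);
  return 0;
}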