Chromium Code Reviews

Diff: src/arm/macro-assembler-arm.cc

Issue 14109010: ARM: Enable VFP default NaN mode (Closed)
Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 7 years, 8 months ago
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 755 matching lines...)
    } else {  // PostIndex or NegPostIndex.
      ASSERT((dst.am() == PostIndex) || (dst.am() == NegPostIndex));
      dst2.set_offset(dst2.offset() - 4);
      str(src1, MemOperand(dst.rn(), 4, PostIndex), cond);
      str(src2, dst2, cond);
    }
  }
}


+void MacroAssembler::VFPEnsureFPSCRState(Register scratch) {
+  // If needed, restore wanted bits of FPSCR.
+  Label fpscr_done;
+  vmrs(scratch);
+  tst(scratch, Operand(kVFPDefaultNaNModeControlBit));
+  b(ne, &fpscr_done);
+  orr(scratch, scratch, Operand(kVFPDefaultNaNModeControlBit));
+  vmsr(scratch);
+  bind(&fpscr_done);
+}
+
+
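Note on the helper added above: FPSCR is the VFP status and control register. vmrs copies it into a scratch core register, tst checks the default-NaN control bit, and only when that bit is clear does the orr/vmsr pair write FPSCR back with the bit set, so an already-correct FPSCR is left untouched. With the DN bit set, any VFP operation with a NaN operand returns the canonical default NaN instead of propagating the operand's payload. A minimal host-side sketch of the same read-test-set logic, assuming kVFPDefaultNaNModeControlBit is the FPSCR DN bit (1 << 25):

#include <cstdint>
#include <cstdio>

// Assumption: the DN (default NaN) control bit is bit 25 of FPSCR, i.e. the
// value of kVFPDefaultNaNModeControlBit in constants-arm.h.
const uint32_t kVFPDefaultNaNModeControlBit = 1u << 25;

// Software model of VFPEnsureFPSCRState: read FPSCR (vmrs), test the DN bit
// (tst), and only if it is clear, set it and write FPSCR back (orr + vmsr).
uint32_t EnsureDefaultNaNMode(uint32_t fpscr) {
  if ((fpscr & kVFPDefaultNaNModeControlBit) == 0) {
    fpscr |= kVFPDefaultNaNModeControlBit;  // orr(scratch, ...); vmsr(scratch);
  }
  return fpscr;                             // bind(&fpscr_done);
}

int main() {
  std::printf("%#010x\n", EnsureDefaultNaNMode(0u));           // prints 0x02000000
  std::printf("%#010x\n", EnsureDefaultNaNMode(0x02000000u));  // already set, unchanged
  return 0;
}

The guard mirrors the early branch in the assembler version: when DN is already set, only the FPSCR read and the bit test execute.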
void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const DwVfpRegister src2,
                                           const Condition cond) {
  // Compare and move FPSCR flags to the normal condition flags.
  VFPCompareAndLoadFlags(src1, src2, pc, cond);
}

void MacroAssembler::VFPCompareAndSetFlags(const DwVfpRegister src1,
                                           const double src2,
                                           const Condition cond) {
(...skipping 1190 matching lines...)
void MacroAssembler::StoreNumberToDoubleElements(Register value_reg,
                                                 Register key_reg,
                                                 Register elements_reg,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Register scratch3,
                                                 Register scratch4,
                                                 Label* fail,
                                                 int elements_offset) {
-  Label smi_value, maybe_nan, have_double_value, is_nan, done;
+  Label smi_value, store;
  Register mantissa_reg = scratch2;
  Register exponent_reg = scratch3;

  // Handle smi values specially.
  JumpIfSmi(value_reg, &smi_value);

  // Ensure that the object is a heap number
  CheckMap(value_reg,
           scratch1,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

-  // Check for nan: all NaN values have a value greater (signed) than 0x7ff00000
-  // in the exponent.
-  mov(scratch1, Operand(kNaNOrInfinityLowerBoundUpper32));
-  ldr(exponent_reg, FieldMemOperand(value_reg, HeapNumber::kExponentOffset));
-  cmp(exponent_reg, scratch1);
-  b(ge, &maybe_nan);
+  vldr(d0, FieldMemOperand(value_reg, HeapNumber::kValueOffset));
+  // Force a canonical NaN.
+  if (emit_debug_code()) {
+    vmrs(ip);
+    tst(ip, Operand(kVFPDefaultNaNModeControlBit));
+    Assert(ne, "Default NaN mode not set");
+  }
+  vsub(d0, d0, kDoubleRegZero);
+  b(&store);

-  ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
+  bind(&smi_value);
+  Register untagged_value = scratch1;
+  SmiUntag(untagged_value, value_reg);
+  FloatingPointHelper::ConvertIntToDouble(
+      this, untagged_value, FloatingPointHelper::kVFPRegisters, d0,
+      mantissa_reg, exponent_reg, scratch4, s2);

-  bind(&have_double_value);
+  bind(&store);
  add(scratch1, elements_reg,
      Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
-  str(mantissa_reg, FieldMemOperand(
-      scratch1, FixedDoubleArray::kHeaderSize - elements_offset));
+  vstr(d0, FieldMemOperand(scratch1,
+      FixedDoubleArray::kHeaderSize - elements_offset));

    ulan 2013/04/15 07:43:01  Indentation is off.

-  uint32_t offset = FixedDoubleArray::kHeaderSize - elements_offset +
-      sizeof(kHoleNanLower32);
-  str(exponent_reg, FieldMemOperand(scratch1, offset));
-  jmp(&done);
-
-  bind(&maybe_nan);
-  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
-  // it's an Infinity, and the non-NaN code path applies.
-  b(gt, &is_nan);
-  ldr(mantissa_reg, FieldMemOperand(value_reg, HeapNumber::kMantissaOffset));
-  cmp(mantissa_reg, Operand::Zero());
-  b(eq, &have_double_value);
-  bind(&is_nan);
-  // Load canonical NaN for storing into the double array.
-  uint64_t nan_int64 = BitCast<uint64_t>(
-      FixedDoubleArray::canonical_not_the_hole_nan_as_double());
-  mov(mantissa_reg, Operand(static_cast<uint32_t>(nan_int64)));
-  mov(exponent_reg, Operand(static_cast<uint32_t>(nan_int64 >> 32)));
-  jmp(&have_double_value);
-
-  bind(&smi_value);
-  add(scratch1, elements_reg,
-      Operand(FixedDoubleArray::kHeaderSize - kHeapObjectTag -
-              elements_offset));
-  add(scratch1, scratch1,
-      Operand(key_reg, LSL, kDoubleSizeLog2 - kSmiTagSize));
-  // scratch1 is now effective address of the double element
-
-  FloatingPointHelper::Destination destination;
-  destination = FloatingPointHelper::kVFPRegisters;
-
-  Register untagged_value = elements_reg;
-  SmiUntag(untagged_value, value_reg);
-  FloatingPointHelper::ConvertIntToDouble(this,
-                                          untagged_value,
-                                          destination,
-                                          d0,
-                                          mantissa_reg,
-                                          exponent_reg,
-                                          scratch4,
-                                          s2);
-  if (destination == FloatingPointHelper::kVFPRegisters) {
-    vstr(d0, scratch1, 0);
-  } else {
-    str(mantissa_reg, MemOperand(scratch1, 0));
-    str(exponent_reg, MemOperand(scratch1, Register::kSizeInBytes));
-  }
-  bind(&done);
}
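Note on the rewrite above: the old code detected NaN by hand (roughly, an all-ones exponent in the upper word plus a non-zero fraction) and materialized the canonical NaN with two mov instructions, while the new code loads the heap number into d0 and executes vsub(d0, d0, kDoubleRegZero), where kDoubleRegZero is the VFP register the macro assembler keeps holding 0.0. With FPSCR's default-NaN mode enabled, the subtraction turns any NaN input into the default NaN and leaves every other double, including +/-Infinity and -0.0, bit-for-bit unchanged, since x - 0.0 == x for all non-NaN x under round-to-nearest. A portable C++ sketch of the two strategies follows; the quiet-NaN pattern 0x7FF8000000000000 stands in for FixedDoubleArray::canonical_not_the_hole_nan_as_double() and is an assumption made for illustration.

#include <cmath>
#include <cstdint>
#include <cstdio>
#include <cstring>

static uint64_t Bits(double d) {
  uint64_t b;
  std::memcpy(&b, &d, sizeof b);
  return b;
}

static double FromBits(uint64_t b) {
  double d;
  std::memcpy(&d, &b, sizeof d);
  return d;
}

// Assumed canonical NaN (quiet NaN with an empty payload).
static const uint64_t kCanonicalNaN = 0x7FF8000000000000ULL;

// Old strategy, approximated: an all-ones exponent plus a non-zero fraction
// means NaN; Infinities and ordinary doubles are stored unchanged.
double CanonicalizeByInspection(double value) {
  uint64_t bits = Bits(value);
  bool exponent_all_ones =
      (bits & 0x7FF0000000000000ULL) == 0x7FF0000000000000ULL;
  bool fraction_nonzero = (bits & 0x000FFFFFFFFFFFFFULL) != 0;
  return (exponent_all_ones && fraction_nonzero) ? FromBits(kCanonicalNaN)
                                                 : value;
}

// New strategy, modelled in software: with FPSCR.DN set, "value - 0.0" yields
// the default NaN for a NaN input and reproduces every other input exactly.
// Portable C++ cannot flip FPSCR.DN, so the NaN case is spelled out here.
double CanonicalizeBySubtraction(double value) {
  return std::isnan(value) ? FromBits(kCanonicalNaN) : value - 0.0;
}

int main() {
  double odd_nan = FromBits(0x7FF8DEAD00000001ULL);  // NaN with a payload
  std::printf("%016llx\n",
              (unsigned long long)Bits(CanonicalizeByInspection(odd_nan)));
  std::printf("%016llx\n",
              (unsigned long long)Bits(CanonicalizeBySubtraction(odd_nan)));
  std::printf("%016llx\n",
              (unsigned long long)Bits(CanonicalizeBySubtraction(-0.0)));
  return 0;
}

Both the smi path and the heap-number path now leave the result in d0, which is why the pair of 32-bit str instructions in the old code collapses into a single vstr of d0 in the new version.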
void MacroAssembler::CompareMap(Register obj,
                                Register scratch,
                                Handle<Map> map,
                                Label* early_success,
                                CompareMapMode mode) {
  ldr(scratch, FieldMemOperand(obj, HeapObject::kMapOffset));
  CompareMap(scratch, map, early_success, mode);
(...skipping 1787 matching lines...)
void CodePatcher::EmitCondition(Condition cond) {
  Instr instr = Assembler::instr_at(masm_.pc_);
  instr = (instr & ~kCondMask) | cond;
  masm_.emit(instr);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM