OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1984 matching lines...)
1995 __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset)); | 1995 __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset)); |
1996 __ mov(v0, a1); | 1996 __ mov(v0, a1); |
1997 } | 1997 } |
1998 __ Ret(); | 1998 __ Ret(); |
1999 } | 1999 } |
2000 | 2000 |
2001 | 2001 |
2002 void UnaryOpStub::GenerateHeapNumberCodeBitNot( | 2002 void UnaryOpStub::GenerateHeapNumberCodeBitNot( |
2003 MacroAssembler* masm, | 2003 MacroAssembler* masm, |
2004 Label* slow) { | 2004 Label* slow) { |
| 2005 Label impossible; |
| 2006 |
2005 EmitCheckForHeapNumber(masm, a0, a1, t2, slow); | 2007 EmitCheckForHeapNumber(masm, a0, a1, t2, slow); |
2006 // Convert the heap number in a0 to an untagged integer in a1. | 2008 // Convert the heap number in a0 to an untagged integer in a1. |
2007 __ ConvertToInt32(a0, a1, a2, a3, f0, slow); | 2009 __ ConvertToInt32(a0, a1, a2, a3, f0, slow); |
2008 | 2010 |
2009 // Do the bitwise operation and check if the result fits in a smi. | 2011 // Do the bitwise operation and check if the result fits in a smi. |
2010 Label try_float; | 2012 Label try_float; |
2011 __ Neg(a1, a1); | 2013 __ Neg(a1, a1); |
2012 __ Addu(a2, a1, Operand(0x40000000)); | 2014 __ Addu(a2, a1, Operand(0x40000000)); |
2013 __ Branch(&try_float, lt, a2, Operand(zero_reg)); | 2015 __ Branch(&try_float, lt, a2, Operand(zero_reg)); |
2014 | 2016 |
2015 // Tag the result as a smi and we're done. | 2017 // Tag the result as a smi and we're done. |
2016 __ SmiTag(v0, a1); | 2018 __ SmiTag(v0, a1); |
2017 __ Ret(); | 2019 __ Ret(); |
2018 | 2020 |
2019 // Try to store the result in a heap number. | 2021 // Try to store the result in a heap number. |
2020 __ bind(&try_float); | 2022 __ bind(&try_float); |
2021 if (mode_ == UNARY_NO_OVERWRITE) { | 2023 if (mode_ == UNARY_NO_OVERWRITE) { |
2022 Label slow_allocate_heapnumber, heapnumber_allocated; | 2024 Label slow_allocate_heapnumber, heapnumber_allocated; |
2023 __ AllocateHeapNumber(v0, a2, a3, t2, &slow_allocate_heapnumber); | 2025 // Allocate a new heap number without zapping v0, which we need if it fails. |
| 2026 __ AllocateHeapNumber(a2, a3, t0, t2, &slow_allocate_heapnumber); |
2024 __ jmp(&heapnumber_allocated); | 2027 __ jmp(&heapnumber_allocated); |
2025 | 2028 |
2026 __ bind(&slow_allocate_heapnumber); | 2029 __ bind(&slow_allocate_heapnumber); |
2027 __ EnterInternalFrame(); | 2030 __ EnterInternalFrame(); |
2028 __ push(a1); | 2031 __ push(v0); // Push the heap number, not the untagged int32. |
2029 __ CallRuntime(Runtime::kNumberAlloc, 0); | 2032 __ CallRuntime(Runtime::kNumberAlloc, 0); |
2030 __ pop(a1); | 2033 __ mov(a2, v0); // Move the new heap number into a2. |
| 2034 // Get the heap number into v0, now that the new heap number is in a2. |
| 2035 __ pop(v0); |
2031 __ LeaveInternalFrame(); | 2036 __ LeaveInternalFrame(); |
2032 | 2037 |
| 2038 // Convert the heap number in v0 to an untagged integer in a1. |
| 2039 // This can't go slow-case because it's the same number we already |
| 2040 // converted successfully before. |
| 2041 __ ConvertToInt32(v0, a1, a3, t0, f0, &impossible); |
| 2042 // Negate the result. |
| 2043 __ Xor(a1, a1, -1); |
| 2044 |
2033 __ bind(&heapnumber_allocated); | 2045 __ bind(&heapnumber_allocated); |
| 2046 __ mov(v0, a2); // Move newly allocated heap number to v0. |
2034 } | 2047 } |
2035 | 2048 |
2036 if (CpuFeatures::IsSupported(FPU)) { | 2049 if (CpuFeatures::IsSupported(FPU)) { |
2037 // Convert the int32 in a1 to the heap number in v0. a2 is corrupted. | 2050 // Convert the int32 in a1 to the heap number in v0. a2 is corrupted. |
2038 CpuFeatures::Scope scope(FPU); | 2051 CpuFeatures::Scope scope(FPU); |
2039 __ mtc1(a1, f0); | 2052 __ mtc1(a1, f0); |
2040 __ cvt_d_w(f0, f0); | 2053 __ cvt_d_w(f0, f0); |
2041 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset)); | 2054 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset)); |
2042 __ Ret(); | 2055 __ Ret(); |
2043 } else { | 2056 } else { |
2044 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not | 2057 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not |
2045 // have to set up a frame. | 2058 // have to set up a frame. |
2046 WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3); | 2059 WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3); |
2047 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 2060 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
2048 } | 2061 } |
| 2062 |
| 2063 __ bind(&impossible); |
| 2064 if (FLAG_debug_code) { |
| 2065 __ stop("Incorrect assumption in bit-not stub"); |
| 2066 } |
2049 } | 2067 } |
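Editor's note on the smi-range check used above (new lines 2014-2015): after negating, the stub adds 0x40000000 to the untagged value and branches on the sign of the sum; the sum stays non-negative exactly when the value fits in the 31-bit smi range of 32-bit V8, otherwise it falls through to the heap-number path. A minimal standalone C++ sketch of that check and of the smi tagging at new line 2018 follows; FitsInSmi and SmiTag here are hypothetical helper names for illustration, not part of this patch.

    #include <cstdint>
    #include <cstdio>

    // Hypothetical helpers mirroring the stub's checks; the arithmetic is done
    // on uint32_t so wrap-around is well defined in standard C++.
    static bool FitsInSmi(int32_t value) {
      // Mirrors "Addu(a2, a1, 0x40000000); Branch(lt, ...)": the biased sum is
      // negative iff value lies outside [-2^30, 2^30 - 1], the 31-bit smi range.
      uint32_t biased = static_cast<uint32_t>(value) + 0x40000000u;
      return static_cast<int32_t>(biased) >= 0;
    }

    static int32_t SmiTag(int32_t value) {
      // On 32-bit V8 a smi is the integer shifted left by one, with tag bit 0.
      return static_cast<int32_t>(static_cast<uint32_t>(value) << 1);
    }

    int main() {
      printf("%d %d\n", FitsInSmi(0x3FFFFFFF), FitsInSmi(0x40000000));    // 1 0
      printf("%d %d\n", FitsInSmi(-0x40000000), FitsInSmi(-0x40000001));  // 1 0
      printf("%d\n", SmiTag(5));                                          // 10
      return 0;
    }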
2050 | 2068 |
2051 | 2069 |
2052 // TODO(svenpanne): Use virtual functions instead of switch. | 2070 // TODO(svenpanne): Use virtual functions instead of switch. |
2053 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { | 2071 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
2054 switch (op_) { | 2072 switch (op_) { |
2055 case Token::SUB: | 2073 case Token::SUB: |
2056 GenerateGenericStubSub(masm); | 2074 GenerateGenericStubSub(masm); |
2057 break; | 2075 break; |
2058 case Token::BIT_NOT: | 2076 case Token::BIT_NOT: |
(...skipping 4588 matching lines...)
6647 __ mov(result, zero_reg); | 6665 __ mov(result, zero_reg); |
6648 __ Ret(); | 6666 __ Ret(); |
6649 } | 6667 } |
6650 | 6668 |
6651 | 6669 |
6652 #undef __ | 6670 #undef __ |
6653 | 6671 |
6654 } } // namespace v8::internal | 6672 } } // namespace v8::internal |
6655 | 6673 |
6656 #endif // V8_TARGET_ARCH_MIPS | 6674 #endif // V8_TARGET_ARCH_MIPS |