| OLD | NEW |
| 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1952 matching lines...) |
| 1963 // Move the result back to general purpose register r0. | 1963 // Move the result back to general purpose register r0. |
| 1964 __ vmov(r0, s0); | 1964 __ vmov(r0, s0); |
| 1965 // Check if the result fits into a smi. | 1965 // Check if the result fits into a smi. |
| 1966 __ add(r1, r0, Operand(0x40000000), SetCC); | 1966 __ add(r1, r0, Operand(0x40000000), SetCC); |
| 1967 __ b(&wont_fit_smi, mi); | 1967 __ b(&wont_fit_smi, mi); |
| 1968 // Tag the result. | 1968 // Tag the result. |
| 1969 STATIC_ASSERT(kSmiTag == 0); | 1969 STATIC_ASSERT(kSmiTag == 0); |
| 1970 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); | 1970 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); |
| 1971 | 1971 |
| 1972 // Check for -0. | 1972 // Check for -0. |
| 1973 __ cmp(r0, Operand(0)); | 1973 __ cmp(r0, Operand(0, RelocInfo::NONE)); |
| 1974 __ b(&restore_fpscr_and_return, ne); | 1974 __ b(&restore_fpscr_and_return, ne); |
| 1975 // r5 already holds the HeapNumber exponent. | 1975 // r5 already holds the HeapNumber exponent. |
| 1976 __ tst(r5, Operand(HeapNumber::kSignMask)); | 1976 __ tst(r5, Operand(HeapNumber::kSignMask)); |
| 1977 // If our HeapNumber is negative it was -0, so load its address and return. | 1977 // If our HeapNumber is negative it was -0, so load its address and return. |
| 1978 // Else r0 is loaded with 0, so we can also just return. | 1978 // Else r0 is loaded with 0, so we can also just return. |
| 1979 __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne); | 1979 __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne); |
| 1980 | 1980 |
| 1981 __ bind(&restore_fpscr_and_return); | 1981 __ bind(&restore_fpscr_and_return); |
| 1982 // Restore FPSCR and return. | 1982 // Restore FPSCR and return. |
| 1983 __ vmsr(r3); | 1983 __ vmsr(r3); |
| (...skipping 1228 matching lines...) |
| 3212 // Return the generated code. | 3212 // Return the generated code. |
| 3213 return GetCode(); | 3213 return GetCode(); |
| 3214 } | 3214 } |
| 3215 | 3215 |
| 3216 | 3216 |
| 3217 #undef __ | 3217 #undef __ |
| 3218 | 3218 |
| 3219 } } // namespace v8::internal | 3219 } } // namespace v8::internal |
| 3220 | 3220 |
| 3221 #endif // V8_TARGET_ARCH_ARM | 3221 #endif // V8_TARGET_ARCH_ARM |
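
Two details in the hunk above are easy to miss: the `add(r1, r0, Operand(0x40000000), SetCC)` / `b(&wont_fit_smi, mi)` pair is the usual 31-bit smi range test, and the later `tst(r5, Operand(HeapNumber::kSignMask))` exists because an integer smi cannot represent -0, so a negative-zero result has to return the original HeapNumber instead. As a minimal standalone sketch of the range test only (the `fits_smi` helper below is hypothetical and not part of V8): adding 2^30 to the untagged value leaves bit 31 clear exactly when the value lies in [-2^30, 2^30 - 1], which is the condition the `mi` branch rejects.

```cpp
// Hypothetical helper mirroring the stub's range check, not V8 code.
// Adding 2^30 overflows into bit 31 exactly when the value falls outside
// the 31-bit smi range [-2^30, 2^30 - 1]; the stub branches to
// wont_fit_smi on "mi", i.e. when bit 31 of the sum is set.
#include <cstdint>
#include <cstdio>

static bool fits_smi(int32_t value) {
  uint32_t probe = static_cast<uint32_t>(value) + 0x40000000u;
  return (probe & 0x80000000u) == 0;  // "mi" would be taken when bit 31 is set
}

int main() {
  std::printf("%d\n", fits_smi(0));            // 1: fits
  std::printf("%d\n", fits_smi(0x3FFFFFFF));   // 1: largest smi
  std::printf("%d\n", fits_smi(0x40000000));   // 0: one past the top
  std::printf("%d\n", fits_smi(-0x40000000));  // 1: smallest smi
  std::printf("%d\n", fits_smi(-0x40000001));  // 0: one below the bottom
  return 0;
}
```

The `mov(r0, Operand(r0, LSL, kSmiTagSize))` tagging that follows then needs no further overflow check, because the range test already guarantees the value survives the one-bit left shift.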