OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1934 matching lines...)
1945 // They are invariant through a Math.Floor call, so just | 1945 // They are invariant through a Math.Floor call, so just |
1946 // return the original argument. | 1946 // return the original argument. |
1947 __ sub(r7, r6, Operand(HeapNumber::kExponentMask | 1947 __ sub(r7, r6, Operand(HeapNumber::kExponentMask |
1948 >> HeapNumber::kMantissaBitsInTopWord), SetCC); | 1948 >> HeapNumber::kMantissaBitsInTopWord), SetCC); |
1949 __ b(&restore_fpscr_and_return, eq); | 1949 __ b(&restore_fpscr_and_return, eq); |
1950 // We had an overflow or underflow in the conversion. Check if we | 1950 // We had an overflow or underflow in the conversion. Check if we |
1951 // have a big exponent. | 1951 // have a big exponent. |
1952 __ cmp(r7, Operand(HeapNumber::kMantissaBits)); | 1952 __ cmp(r7, Operand(HeapNumber::kMantissaBits)); |
1953 // If greater or equal, the argument is already rounded and in r0. | 1953 // If greater or equal, the argument is already rounded and in r0. |
1954 __ b(&restore_fpscr_and_return, ge); | 1954 __ b(&restore_fpscr_and_return, ge); |
1955 __ b(&slow); | 1955 __ b(&wont_fit_smi); |
1956 | 1956 |
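Side note for reviewers: the special-case test above (lines 1945-1954) keys entirely off the IEEE-754 exponent field of the HeapNumber. A minimal C++ sketch of the intent described by the comments, with illustrative names rather than V8's (the constants mirror HeapNumber::kExponentMask and HeapNumber::kMantissaBits):

    #include <cstdint>
    #include <cstring>

    // Sketch only: when is a double left unchanged by Math.floor?
    bool FloorIsNoOp(double d) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof bits);      // read the raw IEEE-754 bits
      int exponent = (bits >> 52) & 0x7FF;      // biased 11-bit exponent field
      if (exponent == 0x7FF) return true;       // NaN or +/-Infinity: invariant
      // With bias 1023, an unbiased exponent >= kMantissaBits (52) puts every
      // mantissa bit at or above the binary point, so the value is integral.
      return exponent - 1023 >= 52;
    }
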
1957 __ bind(&no_vfp_exception); | 1957 __ bind(&no_vfp_exception); |
1958 // Move the result back to general purpose register r0. | 1958 // Move the result back to general purpose register r0. |
1959 __ vmov(r0, s0); | 1959 __ vmov(r0, s0); |
1960 // Check if the result fits into a smi. | 1960 // Check if the result fits into a smi. |
1961 __ add(r1, r0, Operand(0x40000000), SetCC); | 1961 __ add(r1, r0, Operand(0x40000000), SetCC); |
1962 __ b(&wont_fit_smi, mi); | 1962 __ b(&wont_fit_smi, mi); |
1963 // Tag the result. | 1963 // Tag the result. |
1964 STATIC_ASSERT(kSmiTag == 0); | 1964 STATIC_ASSERT(kSmiTag == 0); |
1965 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); | 1965 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); |
1966 | 1966 |
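Side note: the add of 0x40000000 at line 1961 is the standard 31-bit smi range check: the N flag comes out set exactly when the value would not survive the one-bit tag shift. A hedged C++ restatement (illustrative, not V8 code; kSmiTagSize == 1 and kSmiTag == 0 per the STATIC_ASSERT above):

    #include <cstdint>

    // v + 2^30 has its sign bit set iff v lies outside [-2^30, 2^30 - 1],
    // i.e. iff v cannot be stored as a 31-bit smi payload. This is what the
    // ARM code reads off the N flag after `add ..., SetCC` / `b mi`.
    bool FitsInSmi(int32_t v) {
      return static_cast<int32_t>(static_cast<uint32_t>(v) + 0x40000000u) >= 0;
    }

    // Tagging is a single left shift, mirroring `mov r0, r0, LSL #kSmiTagSize`.
    int32_t TagAsSmi(int32_t v) {
      return static_cast<int32_t>(static_cast<uint32_t>(v) << 1);
    }
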
1967 // Check for -0. | 1967 // Check for -0. |
1968 __ cmp(r0, Operand(0, RelocInfo::NONE)); | 1968 __ cmp(r0, Operand(0, RelocInfo::NONE)); |
1969 __ b(&restore_fpscr_and_return, ne); | 1969 __ b(&restore_fpscr_and_return, ne); |
1970 // r5 already holds the HeapNumber exponent. | 1970 // r5 already holds the HeapNumber exponent. |
1971 __ tst(r5, Operand(HeapNumber::kSignMask)); | 1971 __ tst(r5, Operand(HeapNumber::kSignMask)); |
1972 // If our HeapNumber is negative it was -0, so load its address and return. | 1972 // If our HeapNumber is negative it was -0, so load its address and return. |
1973 // Else r0 is loaded with 0, so we can also just return. | 1973 // Else r0 is loaded with 0, so we can also just return. |
1974 __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne); | 1974 __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne); |
1975 | 1975 |
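Side note: the -0 handling exists because the VFP conversion collapses -0.0 to the integer 0, while Math.floor(-0) must still produce -0. With round-towards-minus-infinity, a zero result whose source had the sign bit set can only have come from -0.0 itself (every other negative input floors to -1 or below). A sketch of the same test, with illustrative names:

    #include <cstdint>
    #include <cstring>

    // Decide whether the smi 0 must be replaced by the original -0 HeapNumber.
    bool WasMinusZero(double original, int32_t converted) {
      if (converted != 0) return false;
      uint64_t bits;
      std::memcpy(&bits, &original, sizeof bits);
      return (bits >> 63) != 0;  // mirrors `tst r5, #kSignMask` on the high word
    }
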
1976 __ bind(&restore_fpscr_and_return); | 1976 __ bind(&restore_fpscr_and_return); |
1977 // Restore FPSCR and return. | 1977 // Restore FPSCR and return. |
1978 __ vmsr(r3); | 1978 __ vmsr(r3); |
1979 __ Drop(argc + 1); | 1979 __ Drop(argc + 1); |
1980 __ Ret(); | 1980 __ Ret(); |
1981 | 1981 |
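Side note: the vmrs/vmsr pair brackets the whole fast path: the caller's FPSCR is parked in r3, rounding is switched to round-towards-minus-infinity for the vcvt, and the saved value is written back on every exit. The portable C++ analogue of that save/set/restore discipline (an analogy, not what the stub emits):

    #include <cfenv>
    #include <cmath>

    // Floor implemented through the rounding mode, the way the stub drives VFP.
    long FloorViaRoundingMode(double d) {
      const int saved = std::fegetround();  // analogue of `vmrs r3`
      std::fesetround(FE_DOWNWARD);         // round towards minus infinity
      long result = std::lrint(d);          // analogue of the vcvt
      std::fesetround(saved);               // analogue of `vmsr r3`
      return result;
    }
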
1982 __ bind(&wont_fit_smi); | 1982 __ bind(&wont_fit_smi); |
1983 __ bind(&slow); | |
1984 // Restore FPSCR and fall to slow case. | 1983 // Restore FPSCR and fall to slow case. |
1985 __ vmsr(r3); | 1984 __ vmsr(r3); |
1986 | 1985 |
| 1986 __ bind(&slow); |
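Side note on the restructuring: previously `wont_fit_smi` and `slow` were bound at the same address, so any branch to `slow` taken before the FPSCR had been saved into r3 would still execute the `vmsr(r3)` restore, presumably writing a stale r3 into the FPSCR. Binding `slow` after the restore fixes that, and the one branch that does arrive from inside the VFP region (old line 1955) is retargeted to `wont_fit_smi` so it still passes through the `vmsr`.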
1987 // Tail call the full function. We do not have to patch the receiver | 1987 // Tail call the full function. We do not have to patch the receiver |
1988 // because the function makes no use of it. | 1988 // because the function makes no use of it. |
1989 __ InvokeFunction(function, arguments(), JUMP_FUNCTION); | 1989 __ InvokeFunction(function, arguments(), JUMP_FUNCTION); |
1990 | 1990 |
1991 __ bind(&miss); | 1991 __ bind(&miss); |
1992 // r2: function name. | 1992 // r2: function name. |
1993 MaybeObject* obj = GenerateMissBranch(); | 1993 MaybeObject* obj = GenerateMissBranch(); |
1994 if (obj->IsFailure()) return obj; | 1994 if (obj->IsFailure()) return obj; |
1995 | 1995 |
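Side note: the GenerateMissBranch() / IsFailure() pair is the MaybeObject idiom used throughout these stubs: emitting the miss branch may itself fail to allocate, and the failure sentinel is returned up the stack (so the caller can collect garbage and retry) instead of a code object. A schematic of the idiom only, with invented names rather than V8's real API:

    // Schematic: propagate allocation failure instead of a generated object.
    struct MaybeObject {
      bool failed;
      bool IsFailure() const { return failed; }
    };

    MaybeObject* CompileStub(MaybeObject* (*generate_miss)()) {
      MaybeObject* obj = generate_miss();
      if (obj->IsFailure()) return obj;  // unwind; caller GCs, then retries
      // ... otherwise keep emitting and return the generated code.
      return obj;
    }
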
1996 // Return the generated code. | 1996 // Return the generated code. |
(...skipping 1203 matching lines...)
3200 // Return the generated code. | 3200 // Return the generated code. |
3201 return GetCode(); | 3201 return GetCode(); |
3202 } | 3202 } |
3203 | 3203 |
3204 | 3204 |
3205 #undef __ | 3205 #undef __ |
3206 | 3206 |
3207 } } // namespace v8::internal | 3207 } } // namespace v8::internal |
3208 | 3208 |
3209 #endif // V8_TARGET_ARCH_ARM | 3209 #endif // V8_TARGET_ARCH_ARM |