OLD | NEW |
1 // Copyright 2006-2009 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2009 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1905 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1916 // They are invariant through a Math.Floor call, so just | 1916 // They are invariant through a Math.Floor call, so just |
1917 // return the original argument. | 1917 // return the original argument. |
1918 __ sub(r7, r6, Operand(HeapNumber::kExponentMask | 1918 __ sub(r7, r6, Operand(HeapNumber::kExponentMask |
1919 >> HeapNumber::kMantissaBitsInTopWord), SetCC); | 1919 >> HeapNumber::kMantissaBitsInTopWord), SetCC); |
1920 __ b(&restore_fpscr_and_return, eq); | 1920 __ b(&restore_fpscr_and_return, eq); |
1921 // We had an overflow or underflow in the conversion. Check if we | 1921 // We had an overflow or underflow in the conversion. Check if we |
1922 // have a big exponent. | 1922 // have a big exponent. |
1923 __ cmp(r7, Operand(HeapNumber::kMantissaBits)); | 1923 __ cmp(r7, Operand(HeapNumber::kMantissaBits)); |
1924 // If greater or equal, the argument is already rounded and in r0. | 1924 // If greater or equal, the argument is already rounded and in r0. |
1925 __ b(&restore_fpscr_and_return, ge); | 1925 __ b(&restore_fpscr_and_return, ge); |
1926 __ b(&slow); | 1926 __ b(&wont_fit_smi); |
1927 | 1927 |
1928 __ bind(&no_vfp_exception); | 1928 __ bind(&no_vfp_exception); |
1929 // Move the result back to general purpose register r0. | 1929 // Move the result back to general purpose register r0. |
1930 __ vmov(r0, s0); | 1930 __ vmov(r0, s0); |
1931 // Check if the result fits into a smi. | 1931 // Check if the result fits into a smi. |
1932 __ add(r1, r0, Operand(0x40000000), SetCC); | 1932 __ add(r1, r0, Operand(0x40000000), SetCC); |
1933 __ b(&wont_fit_smi, mi); | 1933 __ b(&wont_fit_smi, mi); |
1934 // Tag the result. | 1934 // Tag the result. |
1935 STATIC_ASSERT(kSmiTag == 0); | 1935 STATIC_ASSERT(kSmiTag == 0); |
1936 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); | 1936 __ mov(r0, Operand(r0, LSL, kSmiTagSize)); |
1937 | 1937 |
1938 // Check for -0. | 1938 // Check for -0. |
1939 __ cmp(r0, Operand(0)); | 1939 __ cmp(r0, Operand(0)); |
1940 __ b(&restore_fpscr_and_return, ne); | 1940 __ b(&restore_fpscr_and_return, ne); |
1941 // r5 already holds the HeapNumber exponent. | 1941 // r5 already holds the HeapNumber exponent. |
1942 __ tst(r5, Operand(HeapNumber::kSignMask)); | 1942 __ tst(r5, Operand(HeapNumber::kSignMask)); |
1943 // If our HeapNumber is negative it was -0, so load its address and return. | 1943 // If our HeapNumber is negative it was -0, so load its address and return. |
1944 // Else r0 is loaded with 0, so we can also just return. | 1944 // Else r0 is loaded with 0, so we can also just return. |
1945 __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne); | 1945 __ ldr(r0, MemOperand(sp, 0 * kPointerSize), ne); |
1946 | 1946 |
1947 __ bind(&restore_fpscr_and_return); | 1947 __ bind(&restore_fpscr_and_return); |
1948 // Restore FPSCR and return. | 1948 // Restore FPSCR and return. |
1949 __ vmsr(r3); | 1949 __ vmsr(r3); |
1950 __ Drop(argc + 1); | 1950 __ Drop(argc + 1); |
1951 __ Ret(); | 1951 __ Ret(); |
1952 | 1952 |
1953 __ bind(&wont_fit_smi); | 1953 __ bind(&wont_fit_smi); |
1954 __ bind(&slow); | |
1955 // Restore FPSCR and fall to slow case. | 1954 // Restore FPSCR and fall to slow case. |
1956 __ vmsr(r3); | 1955 __ vmsr(r3); |
1957 | 1956 |
| 1957 __ bind(&slow); |
1958 // Tail call the full function. We do not have to patch the receiver | 1958 // Tail call the full function. We do not have to patch the receiver |
1959 // because the function makes no use of it. | 1959 // because the function makes no use of it. |
1960 __ InvokeFunction(function, arguments(), JUMP_FUNCTION); | 1960 __ InvokeFunction(function, arguments(), JUMP_FUNCTION); |
1961 | 1961 |
1962 __ bind(&miss); | 1962 __ bind(&miss); |
1963 // r2: function name. | 1963 // r2: function name. |
1964 MaybeObject* obj = GenerateMissBranch(); | 1964 MaybeObject* obj = GenerateMissBranch(); |
1965 if (obj->IsFailure()) return obj; | 1965 if (obj->IsFailure()) return obj; |
1966 | 1966 |
1967 // Return the generated code. | 1967 // Return the generated code. |
(...skipping 1076 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
3044 // Return the generated code. | 3044 // Return the generated code. |
3045 return GetCode(); | 3045 return GetCode(); |
3046 } | 3046 } |
3047 | 3047 |
3048 | 3048 |
3049 #undef __ | 3049 #undef __ |
3050 | 3050 |
3051 } } // namespace v8::internal | 3051 } } // namespace v8::internal |
3052 | 3052 |
3053 #endif // V8_TARGET_ARCH_ARM | 3053 #endif // V8_TARGET_ARCH_ARM |
OLD | NEW |