OLD | NEW |
1 // Copyright 2006-2008 the V8 project authors. All rights reserved. | 1 // Copyright 2006-2008 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1573 matching lines...)
1584 // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to | 1584 // Extract mantissa to r0. OK to clobber r0 now as there are no jumps to |
1585 // the slow case from here. | 1585 // the slow case from here. |
1586 __ and_(r0, value, Operand(kBinary32MantissaMask)); | 1586 __ and_(r0, value, Operand(kBinary32MantissaMask)); |
1587 | 1587 |
1588 // Extract exponent to r1. OK to clobber r1 now as there are no jumps to | 1588 // Extract exponent to r1. OK to clobber r1 now as there are no jumps to |
1589 // the slow case from here. | 1589 // the slow case from here. |
1590 __ mov(r1, Operand(value, LSR, kBinary32MantissaBits)); | 1590 __ mov(r1, Operand(value, LSR, kBinary32MantissaBits)); |
1591 __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits)); | 1591 __ and_(r1, r1, Operand(kBinary32ExponentMask >> kBinary32MantissaBits)); |
1592 | 1592 |
1593 Label exponent_rebiased; | 1593 Label exponent_rebiased; |
1594 __ teq(r1, Operand(0x00)); | 1594 __ teq(r1, Operand(0x00, RelocInfo::NONE)); |
1595 __ b(eq, &exponent_rebiased); | 1595 __ b(eq, &exponent_rebiased); |
1596 | 1596 |
1597 __ teq(r1, Operand(0xff)); | 1597 __ teq(r1, Operand(0xff)); |
1598 __ mov(r1, Operand(0x7ff), LeaveCC, eq); | 1598 __ mov(r1, Operand(0x7ff), LeaveCC, eq); |
1599 __ b(eq, &exponent_rebiased); | 1599 __ b(eq, &exponent_rebiased); |
1600 | 1600 |
1601 // Rebias exponent. | 1601 // Rebias exponent. |
1602 __ add(r1, | 1602 __ add(r1, |
1603 r1, | 1603 r1, |
1604 Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias)); | 1604 Operand(-kBinary32ExponentBias + HeapNumber::kExponentBias)); |
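The hunk above widens a binary32 (single-precision) value toward the binary64 layout used by a HeapNumber: it extracts the mantissa and exponent fields, then rebiases the exponent, special-casing zero/denormal (exponent 0x00) and Inf/NaN (exponent 0xff). A minimal C++ sketch of that logic, assuming the standard IEEE-754 biases of 127 (kBinary32ExponentBias) and 1023 (HeapNumber::kExponentBias); the struct and function names are illustrative, not V8 code:

  #include <cstdint>

  struct Fields {
    uint32_t mantissa;  // 23-bit binary32 mantissa, as extracted into r0
    uint32_t exponent;  // 11-bit binary64-style exponent, as built in r1
  };

  Fields WidenBinary32(uint32_t value) {
    const int kMantissaBits = 23;                      // kBinary32MantissaBits
    const uint32_t kMantissaMask = (1u << kMantissaBits) - 1;
    Fields f;
    f.mantissa = value & kMantissaMask;                // and_(r0, value, ...)
    uint32_t exp32 = (value >> kMantissaBits) & 0xff;  // mov/and_ into r1
    if (exp32 == 0x00) {
      f.exponent = 0;                   // zero or denormal: exponent stays 0
    } else if (exp32 == 0xff) {
      f.exponent = 0x7ff;               // Inf/NaN: all-ones binary64 exponent
    } else {
      f.exponent = exp32 - 127 + 1023;  // rebias: -kBinary32ExponentBias
    }                                   //         + HeapNumber::kExponentBias
    return f;
  }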
(...skipping 778 matching lines...)
2383 | 2383 |
2384 | 2384 |
2385 void PatchInlinedSmiCode(Address address) { | 2385 void PatchInlinedSmiCode(Address address) { |
2386 UNIMPLEMENTED(); | 2386 UNIMPLEMENTED(); |
2387 } | 2387 } |
2388 | 2388 |
2389 | 2389 |
2390 } } // namespace v8::internal | 2390 } } // namespace v8::internal |
2391 | 2391 |
2392 #endif // V8_TARGET_ARCH_ARM | 2392 #endif // V8_TARGET_ARCH_ARM |