OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1599 matching lines...) |
1610 // immediately below so this use of ip does not cause difference with | 1610 // immediately below so this use of ip does not cause difference with |
1611 // respect to register content between debug and release mode. | 1611 // respect to register content between debug and release mode. |
1612 ldr(ip, MemOperand(topaddr)); | 1612 ldr(ip, MemOperand(topaddr)); |
1613 cmp(result, ip); | 1613 cmp(result, ip); |
1614 Check(eq, "Unexpected allocation top"); | 1614 Check(eq, "Unexpected allocation top"); |
1615 } | 1615 } |
1616 // Load allocation limit into ip. Result already contains allocation top. | 1616 // Load allocation limit into ip. Result already contains allocation top. |
1617 ldr(ip, MemOperand(topaddr, limit - top)); | 1617 ldr(ip, MemOperand(topaddr, limit - top)); |
1618 } | 1618 } |
1619 | 1619 |
| 1620 if ((flags & DOUBLE_ALIGNMENT) != 0) { |
| 1621 // Align the next allocation. Storing the filler map without checking top is |
| 1622 // always safe because the limit of the heap is always aligned. |
| 1623 and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC); |
| 1624 Label aligned; |
| 1625 b(eq, &aligned); |
| 1626 mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); |
| 1627 str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex)); |
| 1628 bind(&aligned); |
| 1629 } |
| 1630 |
1620 // Calculate new top and bail out if new space is exhausted. Use result | 1631 // Calculate new top and bail out if new space is exhausted. Use result |
1621 // to calculate the new top. | 1632 // to calculate the new top. |
1622 if (obj_size_operand.is_single_instruction(this)) { | 1633 if (obj_size_operand.is_single_instruction(this)) { |
1623 // We can add the size as an immediate | 1634 // We can add the size as an immediate |
1624 add(scratch2, result, obj_size_operand, SetCC); | 1635 add(scratch2, result, obj_size_operand, SetCC); |
1625 } else { | 1636 } else { |
1626 // Doesn't fit in an immediate, we have to use the register | 1637 // Doesn't fit in an immediate, we have to use the register |
1627 add(scratch2, result, obj_size_reg, SetCC); | 1638 add(scratch2, result, obj_size_reg, SetCC); |
1628 } | 1639 } |
1629 b(cs, gc_required); | 1640 b(cs, gc_required); |
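
Reviewer note: the new DOUBLE_ALIGNMENT block above writes a one-word filler object at an unaligned top and bumps past it, which needs no limit check because the limit itself is always double-aligned, so an unaligned top is strictly below it. The following is a minimal standalone C++ sketch of that idea, not V8 code: kOnePointerFillerMap is a made-up sentinel standing in for the real one-pointer filler map, Allocate is an illustrative helper, and the constants assume a 32-bit target (4-byte pointers, 8-byte doubles).

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <iostream>

// Hypothetical stand-ins for the constants used by the patch (32-bit ARM).
constexpr std::size_t    kPointerSize         = 4;
constexpr std::size_t    kDoubleSize          = 8;
constexpr std::uintptr_t kDoubleAlignmentMask = kDoubleSize - 1;
constexpr std::uint32_t  kOnePointerFillerMap = 0xF1F1F1F1u;  // fake filler map

// Bump-pointer allocation mirroring the patched control flow: if the current
// top is not 8-byte aligned, store a one-word filler there and advance top by
// one pointer, then allocate as usual or bail out to the caller's GC path.
std::uint8_t* Allocate(std::uint8_t*& top, std::uint8_t* limit,
                       std::size_t size_in_bytes, bool double_align) {
  if (double_align &&
      (reinterpret_cast<std::uintptr_t>(top) & kDoubleAlignmentMask) != 0) {
    std::memcpy(top, &kOnePointerFillerMap, kPointerSize);  // filler object
    top += kDoubleSize / 2;                                 // skip one word
  }
  if (static_cast<std::size_t>(limit - top) < size_in_bytes) {
    return nullptr;  // corresponds to branching to gc_required
  }
  std::uint8_t* result = top;
  top += size_in_bytes;
  return result;
}

int main() {
  alignas(kDoubleSize) std::uint8_t space[64];
  std::uint8_t* top = space + kPointerSize;     // deliberately unaligned top
  std::uint8_t* limit = space + sizeof(space);  // limit is double-aligned
  std::uint8_t* obj = Allocate(top, limit, 16, /*double_align=*/true);
  assert((reinterpret_cast<std::uintptr_t>(obj) & kDoubleAlignmentMask) == 0);
  std::cout << "allocated at offset " << (obj - space) << "\n";  // prints 8
}
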
(...skipping 65 matching lines...) |
1695 // immediately below so this use of ip does not cause difference with | 1706 // immediately below so this use of ip does not cause difference with |
1696 // respect to register content between debug and release mode. | 1707 // respect to register content between debug and release mode. |
1697 ldr(ip, MemOperand(topaddr)); | 1708 ldr(ip, MemOperand(topaddr)); |
1698 cmp(result, ip); | 1709 cmp(result, ip); |
1699 Check(eq, "Unexpected allocation top"); | 1710 Check(eq, "Unexpected allocation top"); |
1700 } | 1711 } |
1701 // Load allocation limit into ip. Result already contains allocation top. | 1712 // Load allocation limit into ip. Result already contains allocation top. |
1702 ldr(ip, MemOperand(topaddr, limit - top)); | 1713 ldr(ip, MemOperand(topaddr, limit - top)); |
1703 } | 1714 } |
1704 | 1715 |
| 1716 if ((flags & DOUBLE_ALIGNMENT) != 0) { |
| 1717 // Align the next allocation. Storing the filler map without checking top is |
| 1718 // always safe because the limit of the heap is always aligned. |
| 1719 and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC); |
| 1720 Label aligned; |
| 1721 b(eq, &aligned); |
| 1722 mov(scratch2, Operand(isolate()->factory()->one_pointer_filler_map())); |
| 1723 str(scratch2, MemOperand(result, kDoubleSize / 2, PostIndex)); |
| 1724 bind(&aligned); |
| 1725 } |
| 1726 |
1705 // Calculate new top and bail out if new space is exhausted. Use result | 1727 // Calculate new top and bail out if new space is exhausted. Use result |
1706 // to calculate the new top. Object size may be in words so a shift is | 1728 // to calculate the new top. Object size may be in words so a shift is |
1707 // required to get the number of bytes. | 1729 // required to get the number of bytes. |
1708 if ((flags & SIZE_IN_WORDS) != 0) { | 1730 if ((flags & SIZE_IN_WORDS) != 0) { |
1709 add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC); | 1731 add(scratch2, result, Operand(object_size, LSL, kPointerSizeLog2), SetCC); |
1710 } else { | 1732 } else { |
1711 add(scratch2, result, Operand(object_size), SetCC); | 1733 add(scratch2, result, Operand(object_size), SetCC); |
1712 } | 1734 } |
1713 b(cs, gc_required); | 1735 b(cs, gc_required); |
1714 cmp(scratch2, Operand(ip)); | 1736 cmp(scratch2, Operand(ip)); |
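
Reviewer note: in this second overload the object size may arrive in words (SIZE_IN_WORDS), so it is shifted by kPointerSizeLog2 into bytes, and the add ..., SetCC / b(cs, gc_required) pair catches wrap-around of the new top before it is compared against the limit held in ip. Below is a hedged C++ sketch of that arithmetic, assuming 32-bit registers and 4-byte pointers; ComputeNewTop is an illustrative helper, not a V8 function.

#include <cstdint>
#include <iostream>

constexpr unsigned kPointerSizeLog2 = 2;  // assume 4-byte pointers (32-bit ARM)

// Sketch of the new-top computation: convert a word count to bytes if needed,
// detect wrap-around of the 32-bit addition (the carry check in the assembly),
// and reject allocations that run past the limit.
bool ComputeNewTop(std::uint32_t top, std::uint32_t object_size,
                   bool size_in_words, std::uint32_t limit,
                   std::uint32_t* new_top) {
  std::uint32_t size_in_bytes =
      size_in_words ? object_size << kPointerSizeLog2 : object_size;
  std::uint32_t candidate = top + size_in_bytes;
  if (candidate < top) return false;    // carry set   -> gc_required
  if (candidate > limit) return false;  // above limit -> gc_required
  *new_top = candidate;
  return true;
}

int main() {
  std::uint32_t new_top = 0;
  // Request 8 words (32 bytes) from a region with 64 bytes of headroom.
  bool ok = ComputeNewTop(0x1000u, 8u, /*size_in_words=*/true, 0x1040u,
                          &new_top);
  std::cout << std::boolalpha << ok << " new_top=0x"
            << std::hex << new_top << "\n";  // true new_top=0x1020
}
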
(...skipping 2201 matching lines...) |
3916 void CodePatcher::EmitCondition(Condition cond) { | 3938 void CodePatcher::EmitCondition(Condition cond) { |
3917 Instr instr = Assembler::instr_at(masm_.pc_); | 3939 Instr instr = Assembler::instr_at(masm_.pc_); |
3918 instr = (instr & ~kCondMask) | cond; | 3940 instr = (instr & ~kCondMask) | cond; |
3919 masm_.emit(instr); | 3941 masm_.emit(instr); |
3920 } | 3942 } |
3921 | 3943 |
3922 | 3944 |
3923 } } // namespace v8::internal | 3945 } } // namespace v8::internal |
3924 | 3946 |
3925 #endif // V8_TARGET_ARCH_ARM | 3947 #endif // V8_TARGET_ARCH_ARM |
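
Reviewer note (unchanged context, not part of this change): CodePatcher::EmitCondition above rewrites only the condition field of an already-emitted instruction. A minimal sketch of that bit manipulation, assuming the A32 encoding where the condition occupies bits 31:28 (i.e. kCondMask == 0xF0000000):

#include <cstdint>
#include <iomanip>
#include <iostream>

using Instr = std::uint32_t;

// On ARM (A32) every instruction carries its condition code in bits 31:28.
constexpr Instr kCondMask = 0xF0000000u;
constexpr Instr kEqCond   = 0x00000000u;  // "eq"; 0xE0000000 would be "al"

// Mirrors the masking in CodePatcher::EmitCondition: clear the old condition
// field and OR in the new one, leaving the rest of the instruction untouched.
Instr PatchCondition(Instr instr, Instr cond) {
  return (instr & ~kCondMask) | cond;
}

int main() {
  Instr mov_r0_1 = 0xE3A00001u;  // mov r0, #1  (condition "al")
  Instr patched  = PatchCondition(mov_r0_1, kEqCond);
  std::cout << std::hex << std::uppercase << std::setfill('0')
            << std::setw(8) << mov_r0_1 << " -> "
            << std::setw(8) << patched << "\n";  // E3A00001 -> 03A00001
}
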