OLD | NEW |
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
2 // All Rights Reserved. | 2 // All Rights Reserved. |
3 // | 3 // |
4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
6 // are met: | 6 // are met: |
7 // | 7 // |
8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
10 // | 10 // |
(...skipping 131 matching lines...)
142 // ----------------------------------------------------------------------------- | 142 // ----------------------------------------------------------------------------- |
143 // Implementation of RelocInfo | 143 // Implementation of RelocInfo |
144 | 144 |
145 const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE | | 145 const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE | |
146 1 << RelocInfo::INTERNAL_REFERENCE_ENCODED; | 146 1 << RelocInfo::INTERNAL_REFERENCE_ENCODED; |
147 | 147 |
148 | 148 |
149 bool RelocInfo::IsCodedSpecially() { | 149 bool RelocInfo::IsCodedSpecially() { |
150 // The deserializer needs to know whether a pointer is specially | 150 // The deserializer needs to know whether a pointer is specially |
151 // coded. Being specially coded on PPC means that it is a lis/ori | 151 // coded. Being specially coded on PPC means that it is a lis/ori |
152 // instruction sequence or is an out of line constant pool entry, | 152 // instruction sequence, which is always the case inside code |
153 // and these are always the case inside code objects. | 153 // objects. |
154 return true; | 154 return true; |
155 } | 155 } |
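The lis/ori pattern the comment refers to materializes a constant across fixed instruction slots instead of loading it from memory, which is what makes every target address in a code object patchable in place. A minimal sketch of the halfword split for a 32-bit value (illustrative helper, not part of this file):

    #include <cstdint>

    // Hypothetical helper: operands for a lis/ori pair materializing `value`.
    struct LisOri {
      int16_t hi;   // operand for lis: becomes bits 16..31 of the register
      uint16_t lo;  // operand for ori: fills in bits 0..15
    };

    LisOri SplitImmediate(uint32_t value) {
      LisOri ops;
      ops.hi = static_cast<int16_t>(value >> 16);      // lis dst, hi
      ops.lo = static_cast<uint16_t>(value & 0xffff);  // ori dst, dst, lo
      return ops;
    }

Because ori is a logical OR with no carry into the upper halfword, the high operand needs no sign compensation, unlike an addi-based sequence.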
156 | 156 |
157 | 157 |
158 bool RelocInfo::IsInConstantPool() { | 158 bool RelocInfo::IsInConstantPool() { |
159 #if V8_OOL_CONSTANT_POOL | |
160 return Assembler::IsConstantPoolLoadStart(pc_); | |
161 #else | |
162 return false; | 159 return false; |
163 #endif | |
164 } | 160 } |
165 | 161 |
166 | 162 |
167 void RelocInfo::PatchCode(byte* instructions, int instruction_count) { | 163 void RelocInfo::PatchCode(byte* instructions, int instruction_count) { |
168 // Patch the code at the current address with the supplied instructions. | 164 // Patch the code at the current address with the supplied instructions. |
169 Instr* pc = reinterpret_cast<Instr*>(pc_); | 165 Instr* pc = reinterpret_cast<Instr*>(pc_); |
170 Instr* instr = reinterpret_cast<Instr*>(instructions); | 166 Instr* instr = reinterpret_cast<Instr*>(instructions); |
171 for (int i = 0; i < instruction_count; i++) { | 167 for (int i = 0; i < instruction_count; i++) { |
172 *(pc + i) = *(instr + i); | 168 *(pc + i) = *(instr + i); |
173 } | 169 } |
(...skipping 46 matching lines...)
220 } | 216 } |
221 | 217 |
222 | 218 |
223 // ----------------------------------------------------------------------------- | 219 // ----------------------------------------------------------------------------- |
224 // Specific instructions, constants, and masks. | 220 // Specific instructions, constants, and masks. |
225 | 221 |
226 | 222 |
227 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) | 223 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) |
228 : AssemblerBase(isolate, buffer, buffer_size), | 224 : AssemblerBase(isolate, buffer, buffer_size), |
229 recorded_ast_id_(TypeFeedbackId::None()), | 225 recorded_ast_id_(TypeFeedbackId::None()), |
230 #if V8_OOL_CONSTANT_POOL | |
231 constant_pool_builder_(), | |
232 #endif | |
233 positions_recorder_(this) { | 226 positions_recorder_(this) { |
234 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); | 227 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); |
235 | 228 |
236 no_trampoline_pool_before_ = 0; | 229 no_trampoline_pool_before_ = 0; |
237 trampoline_pool_blocked_nesting_ = 0; | 230 trampoline_pool_blocked_nesting_ = 0; |
238 // We leave space (kMaxBlockTrampolineSectionSize) | 231 // We leave space (kMaxBlockTrampolineSectionSize) |
239 // for BlockTrampolinePoolScope buffer. | 232 // for BlockTrampolinePoolScope buffer. |
240 next_buffer_check_ = | 233 next_buffer_check_ = |
241 FLAG_force_long_branches ? kMaxInt : kMaxCondBranchReach - | 234 FLAG_force_long_branches ? kMaxInt : kMaxCondBranchReach - |
242 kMaxBlockTrampolineSectionSize; | 235 kMaxBlockTrampolineSectionSize; |
243 internal_trampoline_exception_ = false; | 236 internal_trampoline_exception_ = false; |
244 last_bound_pos_ = 0; | 237 last_bound_pos_ = 0; |
245 trampoline_emitted_ = FLAG_force_long_branches; | 238 trampoline_emitted_ = FLAG_force_long_branches; |
246 unbound_labels_count_ = 0; | 239 unbound_labels_count_ = 0; |
247 ClearRecordedAstId(); | 240 ClearRecordedAstId(); |
248 } | 241 } |
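The initial next_buffer_check_ is chosen so that every conditional branch emitted before the first check can still reach a trampoline emitted at the check point. A rough sketch of the arithmetic, with assumed stand-ins for the real constants in assembler-ppc.h:

    // Assumed values, for illustration only: PPC bc-form branches carry a
    // signed 16-bit displacement, i.e. roughly a 32KB reach.
    const int kMaxCondBranchReach = 1 << 15;        // assumption
    const int kMaxBlockTrampolineSectionSize = 64;  // assumption
    // Check early enough that the reserved BlockTrampolinePoolScope bytes
    // cannot push a pending branch's target out of range.
    const int next_buffer_check =
        kMaxCondBranchReach - kMaxBlockTrampolineSectionSize;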
249 | 242 |
250 | 243 |
251 void Assembler::GetCode(CodeDesc* desc) { | 244 void Assembler::GetCode(CodeDesc* desc) { |
252 reloc_info_writer.Finish(); | 245 reloc_info_writer.Finish(); |
| 246 |
253 // Set up code descriptor. | 247 // Set up code descriptor. |
254 desc->buffer = buffer_; | 248 desc->buffer = buffer_; |
255 desc->buffer_size = buffer_size_; | 249 desc->buffer_size = buffer_size_; |
256 desc->instr_size = pc_offset(); | 250 desc->instr_size = pc_offset(); |
257 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); | 251 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); |
258 desc->origin = this; | 252 desc->origin = this; |
259 } | 253 } |
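The reloc_size computation reflects how the assembler shares one buffer: instructions grow upward from buffer_ while relocation info is written downward from buffer_ + buffer_size_ (hence the Reposition call in the constructor above). A standalone sketch of that layout invariant:

    #include <cassert>
    #include <cstdint>

    // Sketch only: the sizes GetCode() reports, given the shared layout
    //   [ instructions .. pc ) .. free gap .. [ reloc_pos .. buffer_end )
    void DescribeCode(uint8_t* buffer, int buffer_size, uint8_t* pc,
                      uint8_t* reloc_pos) {
      int instr_size = static_cast<int>(pc - buffer);
      int reloc_size = static_cast<int>((buffer + buffer_size) - reloc_pos);
      assert(pc <= reloc_pos);  // the two regions must never collide
      (void)instr_size;
      (void)reloc_size;
    }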
260 | 254 |
261 | 255 |
262 void Assembler::Align(int m) { | 256 void Assembler::Align(int m) { |
(...skipping 216 matching lines...)
479 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 2, | 473 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 2, |
480 CodePatcher::DONT_FLUSH); | 474 CodePatcher::DONT_FLUSH); |
481 patcher.masm()->bitwise_mov32(dst, offset); | 475 patcher.masm()->bitwise_mov32(dst, offset); |
482 break; | 476 break; |
483 } | 477 } |
484 case kUnboundMovLabelAddrOpcode: { | 478 case kUnboundMovLabelAddrOpcode: { |
485 // Load the address of the label in a register. | 479 // Load the address of the label in a register. |
486 Register dst = Register::from_code(instr_at(pos + kInstrSize)); | 480 Register dst = Register::from_code(instr_at(pos + kInstrSize)); |
487 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + target_pos); | 481 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + target_pos); |
488 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), | 482 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), |
489 kMovInstructionsNoConstantPool, | 483 kMovInstructions, CodePatcher::DONT_FLUSH); |
490 CodePatcher::DONT_FLUSH); | |
491 AddBoundInternalReferenceLoad(pos); | 484 AddBoundInternalReferenceLoad(pos); |
492 patcher.masm()->bitwise_mov(dst, addr); | 485 patcher.masm()->bitwise_mov(dst, addr); |
493 break; | 486 break; |
494 } | 487 } |
495 case kUnboundJumpTableEntryOpcode: { | 488 case kUnboundJumpTableEntryOpcode: { |
496 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + target_pos); | 489 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + target_pos); |
497 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), | 490 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), |
498 kPointerSize / kInstrSize, CodePatcher::DONT_FLUSH); | 491 kPointerSize / kInstrSize, CodePatcher::DONT_FLUSH); |
499 AddBoundInternalReference(pos); | 492 AddBoundInternalReference(pos); |
500 patcher.masm()->emit_ptr(addr); | 493 patcher.masm()->emit_ptr(addr); |
(...skipping 1038 matching lines...)
1539 addr = target_address_at(pc, constant_pool) + delta; | 1532 addr = target_address_at(pc, constant_pool) + delta; |
1540 } else { | 1533 } else { |
1541 // remove when serializer properly supports internal references | 1534 // remove when serializer properly supports internal references |
1542 addr = code_start; | 1535 addr = code_start; |
1543 } | 1536 } |
1544 set_target_address_at(pc, constant_pool, addr, icache_flush_mode); | 1537 set_target_address_at(pc, constant_pool, addr, icache_flush_mode); |
1545 } | 1538 } |
1546 } | 1539 } |
1547 | 1540 |
1548 | 1541 |
1549 int Assembler::instructions_required_for_mov(const Operand& x) const { | |
1550 #if V8_OOL_CONSTANT_POOL || DEBUG | |
1551 bool canOptimize = | |
1552 !(x.must_output_reloc_info(this) || is_trampoline_pool_blocked()); | |
1553 #endif | |
1554 #if V8_OOL_CONSTANT_POOL | |
1555 if (use_constant_pool_for_mov(x, canOptimize)) { | |
1556 // Current usage guarantees that all constant pool references can | |
1557 // use the same sequence. | |
1558 return kMovInstructionsConstantPool; | |
1559 } | |
1560 #endif | |
1561 DCHECK(!canOptimize); | |
1562 return kMovInstructionsNoConstantPool; | |
1563 } | |
1564 | |
1565 | |
1566 #if V8_OOL_CONSTANT_POOL | |
1567 bool Assembler::use_constant_pool_for_mov(const Operand& x, | |
1568 bool canOptimize) const { | |
1569 if (!is_ool_constant_pool_available() || is_constant_pool_full()) { | |
1570 // If there is no constant pool available, we must use a mov | |
1571 // immediate sequence. | |
1572 return false; | |
1573 } | |
1574 | |
1575 intptr_t value = x.immediate(); | |
1576 if (canOptimize && is_int16(value)) { | |
1577 // Prefer a single-instruction load-immediate. | |
1578 return false; | |
1579 } | |
1580 | |
1581 return true; | |
1582 } | |
1583 | |
1584 | |
1585 void Assembler::EnsureSpaceFor(int space_needed) { | 1542 void Assembler::EnsureSpaceFor(int space_needed) { |
1586 if (buffer_space() <= (kGap + space_needed)) { | 1543 if (buffer_space() <= (kGap + space_needed)) { |
1587 GrowBuffer(); | 1544 GrowBuffer(space_needed); |
1588 } | 1545 } |
1589 } | 1546 } |
1590 #endif | |
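Threading space_needed through to GrowBuffer (new in this change) guarantees that a single grow step can satisfy a request larger than the default growth would provide. A hedged usage sketch within this file's context — a caller reserves room for a fixed-length sequence up front so that no reallocation can split it:

    // Illustrative only; mirrors the pattern mov_label_addr uses below.
    // Assembler, Register, kMovInstructions and kInstrSize are the types
    // and constants this file already relies on.
    void EmitPatchableAddress(Assembler* assm, Register dst, intptr_t addr) {
      assm->EnsureSpaceFor(kMovInstructions * kInstrSize);
      assm->bitwise_mov(dst, addr);  // fixed-length, patchable mov sequence
    }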
1591 | 1547 |
1592 | 1548 |
1593 bool Operand::must_output_reloc_info(const Assembler* assembler) const { | 1549 bool Operand::must_output_reloc_info(const Assembler* assembler) const { |
1594 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { | 1550 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { |
1595 if (assembler != NULL && assembler->predictable_code_size()) return true; | 1551 if (assembler != NULL && assembler->predictable_code_size()) return true; |
1596 return assembler->serializer_enabled(); | 1552 return assembler->serializer_enabled(); |
1597 } else if (RelocInfo::IsNone(rmode_)) { | 1553 } else if (RelocInfo::IsNone(rmode_)) { |
1598 return false; | 1554 return false; |
1599 } | 1555 } |
1600 return true; | 1556 return true; |
1601 } | 1557 } |
1602 | 1558 |
1603 | 1559 |
1604 // Primarily used for loading constants. | 1560 // Primarily used for loading constants. |
1605 // This should really move to the macro-assembler, as it | 1561 // This should really move to the macro-assembler, as it |
1606 // is really a pseudo instruction. | 1562 // is really a pseudo instruction. |
1607 // Some usages of this intend for a FIXED_SEQUENCE to be used. | 1563 // Some usages of this intend for a FIXED_SEQUENCE to be used. |
1608 // TODO: break this dependency so we can optimize mov() in general | 1564 // TODO: break this dependency so we can optimize mov() in general |
1609 // and only use the generic version when we require a fixed sequence. | 1565 // and only use the generic version when we require a fixed sequence. |
1610 void Assembler::mov(Register dst, const Operand& src) { | 1566 void Assembler::mov(Register dst, const Operand& src) { |
1611 intptr_t value = src.immediate(); | 1567 intptr_t value = src.immediate(); |
1612 bool canOptimize; | 1568 bool canOptimize; |
1613 RelocInfo rinfo(pc_, src.rmode_, value, NULL); | 1569 RelocInfo rinfo(pc_, src.rmode_, value, NULL); |
1614 | 1570 |
1615 if (src.must_output_reloc_info(this)) { | |
1616 RecordRelocInfo(rinfo); | |
1617 } | |
1618 | |
1619 canOptimize = !(src.must_output_reloc_info(this) || | 1571 canOptimize = !(src.must_output_reloc_info(this) || |
1620 (is_trampoline_pool_blocked() && !is_int16(value))); | 1572 (is_trampoline_pool_blocked() && !is_int16(value))); |
1621 | 1573 |
1622 #if V8_OOL_CONSTANT_POOL | |
1623 if (use_constant_pool_for_mov(src, canOptimize)) { | |
1624 DCHECK(is_ool_constant_pool_available()); | |
1625 ConstantPoolAddEntry(rinfo); | |
1626 #if V8_TARGET_ARCH_PPC64 | |
1627 BlockTrampolinePoolScope block_trampoline_pool(this); | |
1628 // We are forced to use 2 instruction sequence since the constant | |
1629 // pool pointer is tagged. | |
1630 li(dst, Operand::Zero()); | |
1631 ldx(dst, MemOperand(kConstantPoolRegister, dst)); | |
1632 #else | |
1633 lwz(dst, MemOperand(kConstantPoolRegister, 0)); | |
1634 #endif | |
1635 return; | |
1636 } | |
1637 #endif | |
1638 | |
1639 if (canOptimize) { | 1574 if (canOptimize) { |
1640 if (is_int16(value)) { | 1575 if (is_int16(value)) { |
1641 li(dst, Operand(value)); | 1576 li(dst, Operand(value)); |
1642 } else { | 1577 } else { |
1643 uint16_t u16; | 1578 uint16_t u16; |
1644 #if V8_TARGET_ARCH_PPC64 | 1579 #if V8_TARGET_ARCH_PPC64 |
1645 if (is_int32(value)) { | 1580 if (is_int32(value)) { |
1646 #endif | 1581 #endif |
1647 lis(dst, Operand(value >> 16)); | 1582 lis(dst, Operand(value >> 16)); |
1648 #if V8_TARGET_ARCH_PPC64 | 1583 #if V8_TARGET_ARCH_PPC64 |
(...skipping 16 matching lines...)
1665 #endif | 1600 #endif |
1666 u16 = (value & 0xffff); | 1601 u16 = (value & 0xffff); |
1667 if (u16) { | 1602 if (u16) { |
1668 ori(dst, dst, Operand(u16)); | 1603 ori(dst, dst, Operand(u16)); |
1669 } | 1604 } |
1670 } | 1605 } |
1671 return; | 1606 return; |
1672 } | 1607 } |
1673 | 1608 |
1674 DCHECK(!canOptimize); | 1609 DCHECK(!canOptimize); |
| 1610 if (src.must_output_reloc_info(this)) { |
| 1611 RecordRelocInfo(rinfo); |
| 1612 } |
1675 bitwise_mov(dst, value); | 1613 bitwise_mov(dst, value); |
1676 } | 1614 } |
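Concretely, the optimized path picks the shortest sequence the immediate allows, while anything that needs relocation info (or lands in a blocked trampoline region) falls through to the fixed-length bitwise_mov so it stays patchable. A hypothetical sketch of the selection, counting instructions:

    #include <cstdint>

    // Hypothetical helper mirroring mov()'s choices on PPC64; the branch
    // for values wider than 32 bits is elided in the diff above and here.
    int InstructionsForMov(int64_t value, bool can_optimize) {
      if (can_optimize) {
        if (value >= -32768 && value <= 32767) return 1;         // li
        if (value >= INT32_MIN && value <= INT32_MAX) return 2;  // lis + ori
      }
      return 5;  // bitwise_mov: lis/ori/sldi/oris/ori (assumed fixed length)
    }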
1677 | 1615 |
1678 | 1616 |
1679 void Assembler::bitwise_mov(Register dst, intptr_t value) { | 1617 void Assembler::bitwise_mov(Register dst, intptr_t value) { |
1680 BlockTrampolinePoolScope block_trampoline_pool(this); | 1618 BlockTrampolinePoolScope block_trampoline_pool(this); |
1681 #if V8_TARGET_ARCH_PPC64 | 1619 #if V8_TARGET_ARCH_PPC64 |
1682 int32_t hi_32 = static_cast<int32_t>(value >> 32); | 1620 int32_t hi_32 = static_cast<int32_t>(value >> 32); |
1683 int32_t lo_32 = static_cast<int32_t>(value); | 1621 int32_t lo_32 = static_cast<int32_t>(value); |
1684 int hi_word = static_cast<int>(hi_32 >> 16); | 1622 int hi_word = static_cast<int>(hi_32 >> 16); |
(...skipping 43 matching lines...)
1728 // beginning of the code. | 1666 // beginning of the code. |
1729 // | 1667 // |
1730 // target_at extracts the link and target_at_put patches the instructions. | 1668 // target_at extracts the link and target_at_put patches the instructions. |
1731 BlockTrampolinePoolScope block_trampoline_pool(this); | 1669 BlockTrampolinePoolScope block_trampoline_pool(this); |
1732 emit(kUnboundMovLabelOffsetOpcode | (link & kImm26Mask)); | 1670 emit(kUnboundMovLabelOffsetOpcode | (link & kImm26Mask)); |
1733 emit(dst.code()); | 1671 emit(dst.code()); |
1734 } | 1672 } |
1735 } | 1673 } |
1736 | 1674 |
1737 | 1675 |
1738 // TODO(mbrandy): allow loading internal reference from constant pool | |
1739 void Assembler::mov_label_addr(Register dst, Label* label) { | 1676 void Assembler::mov_label_addr(Register dst, Label* label) { |
1740 CheckBuffer(); | 1677 CheckBuffer(); |
1741 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); | 1678 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); |
1742 int position = link(label); | 1679 int position = link(label); |
1743 if (label->is_bound()) { | 1680 if (label->is_bound()) { |
1744 // CheckBuffer() is called too frequently. This will pre-grow | 1681 // CheckBuffer() is called too frequently. This will pre-grow |
1745 // the buffer if needed to avoid splitting the relocation and instructions. | 1682 // the buffer if needed to avoid splitting the relocation and instructions. |
1746 #if V8_OOL_CONSTANT_POOL | 1683 EnsureSpaceFor(kMovInstructions * kInstrSize); |
1747 EnsureSpaceFor(kMovInstructionsNoConstantPool * kInstrSize); | |
1748 #endif | |
1749 | 1684 |
1750 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + position); | 1685 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + position); |
1751 AddBoundInternalReferenceLoad(pc_offset()); | 1686 AddBoundInternalReferenceLoad(pc_offset()); |
1752 bitwise_mov(dst, addr); | 1687 bitwise_mov(dst, addr); |
1753 } else { | 1688 } else { |
1754 // Encode internal reference to unbound label. We use a dummy opcode | 1689 // Encode internal reference to unbound label. We use a dummy opcode |
1755 // such that it won't collide with any opcode that might appear in the | 1690 // such that it won't collide with any opcode that might appear in the |
1756 // label's chain. Encode the destination register in the 2nd instruction. | 1691 // label's chain. Encode the destination register in the 2nd instruction. |
1757 int link = position - pc_offset(); | 1692 int link = position - pc_offset(); |
1758 DCHECK_EQ(0, link & 3); | 1693 DCHECK_EQ(0, link & 3); |
1759 link >>= 2; | 1694 link >>= 2; |
1760 DCHECK(is_int26(link)); | 1695 DCHECK(is_int26(link)); |
1761 | 1696 |
1762 // When the label is bound, these instructions will be patched | 1697 // When the label is bound, these instructions will be patched |
1763 // with a multi-instruction mov sequence that will load the | 1698 // with a multi-instruction mov sequence that will load the |
1764 // destination register with the address of the label. | 1699 // destination register with the address of the label. |
1765 // | 1700 // |
1766 // target_at extracts the link and target_at_put patches the instructions. | 1701 // target_at extracts the link and target_at_put patches the instructions. |
1767 BlockTrampolinePoolScope block_trampoline_pool(this); | 1702 BlockTrampolinePoolScope block_trampoline_pool(this); |
1768 emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask)); | 1703 emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask)); |
1769 emit(dst.code()); | 1704 emit(dst.code()); |
1770 DCHECK(kMovInstructionsNoConstantPool >= 2); | 1705 DCHECK(kMovInstructions >= 2); |
1771 for (int i = 0; i < kMovInstructionsNoConstantPool - 2; i++) nop(); | 1706 for (int i = 0; i < kMovInstructions - 2; i++) nop(); |
1772 } | 1707 } |
1773 } | 1708 } |
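The dummy-opcode trick stores the label link as a signed 26-bit word delta, just like a branch displacement, so target_at can walk the chain without mistaking the entry for a real instruction. A small round-trip sketch of the encode/decode arithmetic (mask and opcode values are assumptions):

    #include <cassert>
    #include <cstdint>

    const uint32_t kImm26Mask = (1u << 26) - 1;              // assumed
    const uint32_t kUnboundMovLabelAddrOpcode = 0x1u << 26;  // placeholder

    uint32_t EncodeLink(int byte_delta) {
      assert((byte_delta & 3) == 0);  // instruction aligned
      int link = byte_delta / 4;      // bytes -> words, as emitted above
      return kUnboundMovLabelAddrOpcode |
             (static_cast<uint32_t>(link) & kImm26Mask);
    }

    int DecodeLink(uint32_t instr) {
      int link = static_cast<int>(instr & kImm26Mask);
      if (link & (1 << 25)) link -= (1 << 26);  // sign-extend the 26-bit field
      return link * 4;                          // words -> bytes
    }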
1774 | 1709 |
1775 | 1710 |
1776 void Assembler::emit_label_addr(Label* label) { | 1711 void Assembler::emit_label_addr(Label* label) { |
1777 CheckBuffer(); | 1712 CheckBuffer(); |
1778 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); | 1713 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); |
1779 int position = link(label); | 1714 int position = link(label); |
1780 if (label->is_bound()) { | 1715 if (label->is_bound()) { |
1781 // CheckBuffer() is called too frequently. This will pre-grow | 1716 // CheckBuffer() is called too frequently. This will pre-grow |
1782 // the buffer if needed to avoid splitting the relocation and entry. | 1717 // the buffer if needed to avoid splitting the relocation and entry. |
1783 #if V8_OOL_CONSTANT_POOL | |
1784 EnsureSpaceFor(kPointerSize); | 1718 EnsureSpaceFor(kPointerSize); |
1785 #endif | |
1786 | 1719 |
1787 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + position); | 1720 intptr_t addr = reinterpret_cast<uintptr_t>(buffer_ + position); |
1788 AddBoundInternalReference(pc_offset()); | 1721 AddBoundInternalReference(pc_offset()); |
1789 emit_ptr(addr); | 1722 emit_ptr(addr); |
1790 } else { | 1723 } else { |
1791 // Encode internal reference to unbound label. We use a dummy opcode | 1724 // Encode internal reference to unbound label. We use a dummy opcode |
1792 // such that it won't collide with any opcode that might appear in the | 1725 // such that it won't collide with any opcode that might appear in the |
1793 // label's chain. | 1726 // label's chain. |
1794 int link = position - pc_offset(); | 1727 int link = position - pc_offset(); |
1795 DCHECK_EQ(0, link & 3); | 1728 DCHECK_EQ(0, link & 3); |
(...skipping 463 matching lines...)
2259 case DEBUG_BREAK_NOP: | 2192 case DEBUG_BREAK_NOP: |
2260 reg = 3; | 2193 reg = 3; |
2261 break; | 2194 break; |
2262 default: | 2195 default: |
2263 UNIMPLEMENTED(); | 2196 UNIMPLEMENTED(); |
2264 } | 2197 } |
2265 return instr == (ORI | reg * B21 | reg * B16); | 2198 return instr == (ORI | reg * B21 | reg * B16); |
2266 } | 2199 } |
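The comparison works because a marker nop is encoded as ori rN, rN, 0: a genuine no-op whose register number doubles as the marker type (r3 for DEBUG_BREAK_NOP above). A sketch of the pattern being matched, with field shifts assumed to match this file's constants:

    #include <cstdint>

    const uint32_t ORI = 24u << 26;  // assumed primary opcode for ori
    const uint32_t B16 = 1u << 16;   // RA field shift, as used above
    const uint32_t B21 = 1u << 21;   // RS field shift, as used above

    // ori rN, rN, 0 leaves rN unchanged, so the register number is a free
    // channel for distinguishing nop flavors.
    uint32_t MarkerNop(uint32_t reg) { return ORI | reg * B21 | reg * B16; }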
2267 | 2200 |
2268 | 2201 |
2269 // Debugging. | 2202 void Assembler::GrowBuffer(int needed) { |
2270 void Assembler::GrowBuffer() { | |
2271 if (!own_buffer_) FATAL("external code buffer is too small"); | 2203 if (!own_buffer_) FATAL("external code buffer is too small"); |
2272 | 2204 |
2273 // Compute new buffer size. | 2205 // Compute new buffer size. |
2274 CodeDesc desc; // the new buffer | 2206 CodeDesc desc; // the new buffer |
2275 if (buffer_size_ < 4 * KB) { | 2207 if (buffer_size_ < 4 * KB) { |
2276 desc.buffer_size = 4 * KB; | 2208 desc.buffer_size = 4 * KB; |
2277 } else if (buffer_size_ < 1 * MB) { | 2209 } else if (buffer_size_ < 1 * MB) { |
2278 desc.buffer_size = 2 * buffer_size_; | 2210 desc.buffer_size = 2 * buffer_size_; |
2279 } else { | 2211 } else { |
2280 desc.buffer_size = buffer_size_ + 1 * MB; | 2212 desc.buffer_size = buffer_size_ + 1 * MB; |
2281 } | 2213 } |
| 2214 int space = buffer_space() + (desc.buffer_size - buffer_size_); |
| 2215 if (space < needed) { |
| 2216 desc.buffer_size += needed - space; |
| 2217 } |
2282 CHECK_GT(desc.buffer_size, 0); // no overflow | 2218 CHECK_GT(desc.buffer_size, 0); // no overflow |
2283 | 2219 |
2284 // Set up new buffer. | 2220 // Set up new buffer. |
2285 desc.buffer = NewArray<byte>(desc.buffer_size); | 2221 desc.buffer = NewArray<byte>(desc.buffer_size); |
2286 | 2222 |
2287 desc.instr_size = pc_offset(); | 2223 desc.instr_size = pc_offset(); |
2288 desc.reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); | 2224 desc.reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); |
2289 | 2225 |
2290 // Copy the data. | 2226 // Copy the data. |
2291 intptr_t pc_delta = desc.buffer - buffer_; | 2227 intptr_t pc_delta = desc.buffer - buffer_; |
(...skipping 13 matching lines...)
2305 | 2241 |
2306 // Relocate internal references | 2242 // Relocate internal references |
2307 for (int pos : internal_reference_positions_) { | 2243 for (int pos : internal_reference_positions_) { |
2308 RelocateInternalReference(buffer_ + pos, pc_delta, 0, | 2244 RelocateInternalReference(buffer_ + pos, pc_delta, 0, |
2309 RelocInfo::INTERNAL_REFERENCE); | 2245 RelocInfo::INTERNAL_REFERENCE); |
2310 } | 2246 } |
2311 for (int pos : internal_reference_load_positions_) { | 2247 for (int pos : internal_reference_load_positions_) { |
2312 RelocateInternalReference(buffer_ + pos, pc_delta, 0, | 2248 RelocateInternalReference(buffer_ + pos, pc_delta, 0, |
2313 RelocInfo::INTERNAL_REFERENCE_ENCODED); | 2249 RelocInfo::INTERNAL_REFERENCE_ENCODED); |
2314 } | 2250 } |
2315 #if V8_OOL_CONSTANT_POOL | |
2316 constant_pool_builder_.Relocate(pc_delta); | |
2317 #endif | |
2318 } | 2251 } |
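The growth policy doubles small buffers and grows megabyte-sized ones linearly; the new needed parameter tops the result up when one request outstrips that default step. A standalone sketch of the size computation:

    // Sketch of the sizing logic above (sizes in bytes).
    int NewBufferSize(int buffer_size, int buffer_space, int needed) {
      const int KB = 1024, MB = 1024 * 1024;
      int new_size;
      if (buffer_size < 4 * KB) {
        new_size = 4 * KB;
      } else if (buffer_size < 1 * MB) {
        new_size = 2 * buffer_size;
      } else {
        new_size = buffer_size + 1 * MB;
      }
      // Extra headroom if the doubling/step alone cannot hold the request.
      int space = buffer_space + (new_size - buffer_size);
      if (space < needed) new_size += needed - space;
      return new_size;
    }

For example, a 64KB buffer with 100 free bytes asked for 200KB doubles to 128KB first, finds roughly 64KB of headroom, and then grows by the shortfall so the request fits.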
2319 | 2252 |
2320 | 2253 |
2321 void Assembler::db(uint8_t data) { | 2254 void Assembler::db(uint8_t data) { |
2322 CheckBuffer(); | 2255 CheckBuffer(); |
2323 *reinterpret_cast<uint8_t*>(pc_) = data; | 2256 *reinterpret_cast<uint8_t*>(pc_) = data; |
2324 pc_ += sizeof(uint8_t); | 2257 pc_ += sizeof(uint8_t); |
2325 } | 2258 } |
2326 | 2259 |
2327 | 2260 |
2328 void Assembler::dd(uint32_t data) { | 2261 void Assembler::dd(uint32_t data) { |
2329 CheckBuffer(); | 2262 CheckBuffer(); |
2330 *reinterpret_cast<uint32_t*>(pc_) = data; | 2263 *reinterpret_cast<uint32_t*>(pc_) = data; |
2331 pc_ += sizeof(uint32_t); | 2264 pc_ += sizeof(uint32_t); |
2332 } | 2265 } |
2333 | 2266 |
2334 | 2267 |
2335 void Assembler::emit_ptr(intptr_t data) { | 2268 void Assembler::emit_ptr(intptr_t data) { |
2336 CheckBuffer(); | 2269 CheckBuffer(); |
2337 *reinterpret_cast<uintptr_t*>(pc_) = data; | 2270 *reinterpret_cast<intptr_t*>(pc_) = data; |
2338 pc_ += sizeof(uintptr_t); | 2271 pc_ += sizeof(intptr_t); |
2339 } | 2272 } |
2340 | 2273 |
2341 | 2274 |
2342 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 2275 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
2343 RelocInfo rinfo(pc_, rmode, data, NULL); | 2276 RelocInfo rinfo(pc_, rmode, data, NULL); |
2344 RecordRelocInfo(rinfo); | 2277 RecordRelocInfo(rinfo); |
2345 } | 2278 } |
2346 | 2279 |
2347 | 2280 |
2348 void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { | 2281 void Assembler::RecordRelocInfo(const RelocInfo& rinfo) { |
(...skipping 73 matching lines...)
2422 // Number of branches to unbound label at this point is zero, so we can | 2355 // Number of branches to unbound label at this point is zero, so we can |
2423 // move next buffer check to maximum. | 2356 // move next buffer check to maximum. |
2424 next_buffer_check_ = | 2357 next_buffer_check_ = |
2425 pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize; | 2358 pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize; |
2426 } | 2359 } |
2427 return; | 2360 return; |
2428 } | 2361 } |
2429 | 2362 |
2430 | 2363 |
2431 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { | 2364 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { |
2432 #if V8_OOL_CONSTANT_POOL | |
2433 return constant_pool_builder_.New(isolate); | |
2434 #else | |
2435 // No out-of-line constant pool support. | |
2436 DCHECK(!FLAG_enable_ool_constant_pool); | 2365 DCHECK(!FLAG_enable_ool_constant_pool); |
2437 return isolate->factory()->empty_constant_pool_array(); | 2366 return isolate->factory()->empty_constant_pool_array(); |
2438 #endif | |
2439 } | 2367 } |
2440 | 2368 |
2441 | 2369 |
2442 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { | 2370 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { |
2443 #if V8_OOL_CONSTANT_POOL | |
2444 constant_pool_builder_.Populate(this, constant_pool); | |
2445 #else | |
2446 // No out-of-line constant pool support. | |
2447 DCHECK(!FLAG_enable_ool_constant_pool); | 2371 DCHECK(!FLAG_enable_ool_constant_pool); |
2448 #endif | |
2449 } | 2372 } |
2450 | |
2451 | |
2452 #if V8_OOL_CONSTANT_POOL | |
2453 ConstantPoolBuilder::ConstantPoolBuilder() | |
2454 : size_(0), | |
2455 entries_(), | |
2456 current_section_(ConstantPoolArray::SMALL_SECTION) {} | |
2457 | |
2458 | |
2459 bool ConstantPoolBuilder::IsEmpty() { return entries_.size() == 0; } | |
2460 | |
2461 | |
2462 ConstantPoolArray::Type ConstantPoolBuilder::GetConstantPoolType( | |
2463 RelocInfo::Mode rmode) { | |
2464 #if V8_TARGET_ARCH_PPC64 | |
2465 // We don't support 32-bit entries at this time. | |
2466 if (!RelocInfo::IsGCRelocMode(rmode)) { | |
2467 return ConstantPoolArray::INT64; | |
2468 #else | |
2469 if (rmode == RelocInfo::NONE64) { | |
2470 return ConstantPoolArray::INT64; | |
2471 } else if (!RelocInfo::IsGCRelocMode(rmode)) { | |
2472 return ConstantPoolArray::INT32; | |
2473 #endif | |
2474 } else if (RelocInfo::IsCodeTarget(rmode)) { | |
2475 return ConstantPoolArray::CODE_PTR; | |
2476 } else { | |
2477 DCHECK(RelocInfo::IsGCRelocMode(rmode) && !RelocInfo::IsCodeTarget(rmode)); | |
2478 return ConstantPoolArray::HEAP_PTR; | |
2479 } | |
2480 } | |
2481 | |
2482 | |
2483 ConstantPoolArray::LayoutSection ConstantPoolBuilder::AddEntry( | |
2484 Assembler* assm, const RelocInfo& rinfo) { | |
2485 RelocInfo::Mode rmode = rinfo.rmode(); | |
2486 DCHECK(rmode != RelocInfo::COMMENT && rmode != RelocInfo::POSITION && | |
2487 rmode != RelocInfo::STATEMENT_POSITION && | |
2488 rmode != RelocInfo::CONST_POOL); | |
2489 | |
2490 // Try to merge entries which won't be patched. | |
2491 int merged_index = -1; | |
2492 ConstantPoolArray::LayoutSection entry_section = current_section_; | |
2493 if (RelocInfo::IsNone(rmode) || | |
2494 (!assm->serializer_enabled() && (rmode >= RelocInfo::CELL))) { | |
2495 size_t i; | |
2496 std::vector<ConstantPoolEntry>::const_iterator it; | |
2497 for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) { | |
2498 if (RelocInfo::IsEqual(rinfo, it->rinfo_)) { | |
2499 // Merge with found entry. | |
2500 merged_index = i; | |
2501 entry_section = entries_[i].section_; | |
2502 break; | |
2503 } | |
2504 } | |
2505 } | |
2506 DCHECK(entry_section <= current_section_); | |
2507 entries_.push_back(ConstantPoolEntry(rinfo, entry_section, merged_index)); | |
2508 | |
2509 if (merged_index == -1) { | |
2510 // Not merged, so update the appropriate count. | |
2511 number_of_entries_[entry_section].increment(GetConstantPoolType(rmode)); | |
2512 } | |
2513 | |
2514 // Check if we still have room for another entry in the small section | |
2515 // given the limitations of the header's layout fields. | |
2516 if (current_section_ == ConstantPoolArray::SMALL_SECTION) { | |
2517 size_ = ConstantPoolArray::SizeFor(*small_entries()); | |
2518 if (!is_uint12(size_)) { | |
2519 current_section_ = ConstantPoolArray::EXTENDED_SECTION; | |
2520 } | |
2521 } else { | |
2522 size_ = ConstantPoolArray::SizeForExtended(*small_entries(), | |
2523 *extended_entries()); | |
2524 } | |
2525 | |
2526 return entry_section; | |
2527 } | |
2528 | |
2529 | |
2530 void ConstantPoolBuilder::Relocate(intptr_t pc_delta) { | |
2531 for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin(); | |
2532 entry != entries_.end(); entry++) { | |
2533 DCHECK(entry->rinfo_.rmode() != RelocInfo::JS_RETURN); | |
2534 entry->rinfo_.set_pc(entry->rinfo_.pc() + pc_delta); | |
2535 } | |
2536 } | |
2537 | |
2538 | |
2539 Handle<ConstantPoolArray> ConstantPoolBuilder::New(Isolate* isolate) { | |
2540 if (IsEmpty()) { | |
2541 return isolate->factory()->empty_constant_pool_array(); | |
2542 } else if (extended_entries()->is_empty()) { | |
2543 return isolate->factory()->NewConstantPoolArray(*small_entries()); | |
2544 } else { | |
2545 DCHECK(current_section_ == ConstantPoolArray::EXTENDED_SECTION); | |
2546 return isolate->factory()->NewExtendedConstantPoolArray( | |
2547 *small_entries(), *extended_entries()); | |
2548 } | |
2549 } | |
2550 | |
2551 | |
2552 void ConstantPoolBuilder::Populate(Assembler* assm, | |
2553 ConstantPoolArray* constant_pool) { | |
2554 DCHECK_EQ(extended_entries()->is_empty(), | |
2555 !constant_pool->is_extended_layout()); | |
2556 DCHECK(small_entries()->equals(ConstantPoolArray::NumberOfEntries( | |
2557 constant_pool, ConstantPoolArray::SMALL_SECTION))); | |
2558 if (constant_pool->is_extended_layout()) { | |
2559 DCHECK(extended_entries()->equals(ConstantPoolArray::NumberOfEntries( | |
2560 constant_pool, ConstantPoolArray::EXTENDED_SECTION))); | |
2561 } | |
2562 | |
2563 // Set up initial offsets. | |
2564 int offsets[ConstantPoolArray::NUMBER_OF_LAYOUT_SECTIONS] | |
2565 [ConstantPoolArray::NUMBER_OF_TYPES]; | |
2566 for (int section = 0; section <= constant_pool->final_section(); section++) { | |
2567 int section_start = (section == ConstantPoolArray::EXTENDED_SECTION) | |
2568 ? small_entries()->total_count() | |
2569 : 0; | |
2570 for (int i = 0; i < ConstantPoolArray::NUMBER_OF_TYPES; i++) { | |
2571 ConstantPoolArray::Type type = static_cast<ConstantPoolArray::Type>(i); | |
2572 if (number_of_entries_[section].count_of(type) != 0) { | |
2573 offsets[section][type] = constant_pool->OffsetOfElementAt( | |
2574 number_of_entries_[section].base_of(type) + section_start); | |
2575 } | |
2576 } | |
2577 } | |
2578 | |
2579 for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin(); | |
2580 entry != entries_.end(); entry++) { | |
2581 RelocInfo rinfo = entry->rinfo_; | |
2582 RelocInfo::Mode rmode = entry->rinfo_.rmode(); | |
2583 ConstantPoolArray::Type type = GetConstantPoolType(rmode); | |
2584 | |
2585 // Update constant pool if necessary and get the entry's offset. | |
2586 int offset; | |
2587 if (entry->merged_index_ == -1) { | |
2588 offset = offsets[entry->section_][type]; | |
2589 offsets[entry->section_][type] += ConstantPoolArray::entry_size(type); | |
2590 if (type == ConstantPoolArray::INT64) { | |
2591 #if V8_TARGET_ARCH_PPC64 | |
2592 constant_pool->set_at_offset(offset, rinfo.data()); | |
2593 #else | |
2594 constant_pool->set_at_offset(offset, rinfo.data64()); | |
2595 } else if (type == ConstantPoolArray::INT32) { | |
2596 constant_pool->set_at_offset(offset, | |
2597 static_cast<int32_t>(rinfo.data())); | |
2598 #endif | |
2599 } else if (type == ConstantPoolArray::CODE_PTR) { | |
2600 constant_pool->set_at_offset(offset, | |
2601 reinterpret_cast<Address>(rinfo.data())); | |
2602 } else { | |
2603 DCHECK(type == ConstantPoolArray::HEAP_PTR); | |
2604 constant_pool->set_at_offset(offset, | |
2605 reinterpret_cast<Object*>(rinfo.data())); | |
2606 } | |
2607 offset -= kHeapObjectTag; | |
2608 entry->merged_index_ = offset; // Stash offset for merged entries. | |
2609 } else { | |
2610 DCHECK(entry->merged_index_ < (entry - entries_.begin())); | |
2611 offset = entries_[entry->merged_index_].merged_index_; | |
2612 } | |
2613 | |
2614 // Patch load instruction with correct offset. | |
2615 Assembler::SetConstantPoolOffset(rinfo.pc(), offset); | |
2616 } | |
2617 } | |
2618 #endif | |
2619 } | 2373 } |
2620 } // namespace v8::internal | 2374 } // namespace v8::internal |
2621 | 2375 |
2622 #endif // V8_TARGET_ARCH_PPC | 2376 #endif // V8_TARGET_ARCH_PPC |