| OLD | NEW |
| 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. | 1 // Copyright (c) 1994-2006 Sun Microsystems Inc. |
| 2 // All Rights Reserved. | 2 // All Rights Reserved. |
| 3 // | 3 // |
| 4 // Redistribution and use in source and binary forms, with or without | 4 // Redistribution and use in source and binary forms, with or without |
| 5 // modification, are permitted provided that the following conditions | 5 // modification, are permitted provided that the following conditions |
| 6 // are met: | 6 // are met: |
| 7 // | 7 // |
| 8 // - Redistributions of source code must retain the above copyright notice, | 8 // - Redistributions of source code must retain the above copyright notice, |
| 9 // this list of conditions and the following disclaimer. | 9 // this list of conditions and the following disclaimer. |
| 10 // | 10 // |
| (...skipping 130 matching lines...) |
| 141 // ----------------------------------------------------------------------------- | 141 // ----------------------------------------------------------------------------- |
| 142 // Implementation of RelocInfo | 142 // Implementation of RelocInfo |
| 143 | 143 |
| 144 const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE | | 144 const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE | |
| 145 1 << RelocInfo::INTERNAL_REFERENCE_ENCODED; | 145 1 << RelocInfo::INTERNAL_REFERENCE_ENCODED; |
| 146 | 146 |
| 147 | 147 |
| 148 bool RelocInfo::IsCodedSpecially() { | 148 bool RelocInfo::IsCodedSpecially() { |
| 149 // The deserializer needs to know whether a pointer is specially | 149 // The deserializer needs to know whether a pointer is specially |
| 150 // coded. Being specially coded on PPC means that it is a lis/ori | 150 // coded. Being specially coded on PPC means that it is a lis/ori |
| 151 // instruction sequence, and these are always the case inside code | 151 // instruction sequence or is a constant pool entry, and these are |
| 152 // objects. | 152 // always the case inside code objects. |
| 153 return true; | 153 return true; |
| 154 } | 154 } |
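
For context, the "lis/ori instruction sequence" is the fixed-length immediate load this file emits when a value must stay patchable in place. A minimal sketch of the 32-bit case, assuming the lis/ori emitters used later in this file (EmitFullMov32 is an illustrative name, not V8 API):

    // Load a full 32-bit immediate in two fixed instruction slots, so the
    // (de)serializer can locate and rewrite the embedded value in place.
    void EmitFullMov32(Assembler* masm, Register dst, int32_t value) {
      masm->lis(dst, Operand(value >> 16));          // high 16 bits
      masm->ori(dst, dst, Operand(value & 0xffff));  // low 16 bits
    }
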
| 155 | 155 |
| 156 | 156 |
| 157 bool RelocInfo::IsInConstantPool() { | 157 bool RelocInfo::IsInConstantPool() { |
| 158 if (FLAG_enable_embedded_constant_pool) { |
| 159 Address constant_pool = host_->constant_pool(); |
| 160 return (constant_pool && Assembler::IsConstantPoolLoadStart(pc_)); |
| 161 } |
| 158 return false; | 162 return false; |
| 159 } | 163 } |
| 160 | 164 |
| 161 | 165 |
| 162 // ----------------------------------------------------------------------------- | 166 // ----------------------------------------------------------------------------- |
| 163 // Implementation of Operand and MemOperand | 167 // Implementation of Operand and MemOperand |
| 164 // See assembler-ppc-inl.h for inlined constructors | 168 // See assembler-ppc-inl.h for inlined constructors |
| 165 | 169 |
| 166 Operand::Operand(Handle<Object> handle) { | 170 Operand::Operand(Handle<Object> handle) { |
| 167 AllowDeferredHandleDereference using_raw_address; | 171 AllowDeferredHandleDereference using_raw_address; |
| (...skipping 26 matching lines...) |
| 194 } | 198 } |
| 195 | 199 |
| 196 | 200 |
| 197 // ----------------------------------------------------------------------------- | 201 // ----------------------------------------------------------------------------- |
| 198 // Specific instructions, constants, and masks. | 202 // Specific instructions, constants, and masks. |
| 199 | 203 |
| 200 | 204 |
| 201 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) | 205 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) |
| 202 : AssemblerBase(isolate, buffer, buffer_size), | 206 : AssemblerBase(isolate, buffer, buffer_size), |
| 203 recorded_ast_id_(TypeFeedbackId::None()), | 207 recorded_ast_id_(TypeFeedbackId::None()), |
| 208 constant_pool_builder_(kLoadPtrMaxReachBits, kLoadDoubleMaxReachBits), |
| 204 positions_recorder_(this) { | 209 positions_recorder_(this) { |
| 205 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); | 210 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); |
| 206 | 211 |
| 207 no_trampoline_pool_before_ = 0; | 212 no_trampoline_pool_before_ = 0; |
| 208 trampoline_pool_blocked_nesting_ = 0; | 213 trampoline_pool_blocked_nesting_ = 0; |
| 214 constant_pool_entry_sharing_blocked_nesting_ = 0; |
| 209 // We leave space (kMaxBlockTrampolineSectionSize) | 215 // We leave space (kMaxBlockTrampolineSectionSize) |
| 210 // for BlockTrampolinePoolScope buffer. | 216 // for BlockTrampolinePoolScope buffer. |
| 211 next_buffer_check_ = | 217 next_buffer_check_ = |
| 212 FLAG_force_long_branches ? kMaxInt : kMaxCondBranchReach - | 218 FLAG_force_long_branches ? kMaxInt : kMaxCondBranchReach - |
| 213 kMaxBlockTrampolineSectionSize; | 219 kMaxBlockTrampolineSectionSize; |
| 214 internal_trampoline_exception_ = false; | 220 internal_trampoline_exception_ = false; |
| 215 last_bound_pos_ = 0; | 221 last_bound_pos_ = 0; |
| 216 trampoline_emitted_ = FLAG_force_long_branches; | 222 trampoline_emitted_ = FLAG_force_long_branches; |
| 217 unbound_labels_count_ = 0; | 223 unbound_labels_count_ = 0; |
| 218 ClearRecordedAstId(); | 224 ClearRecordedAstId(); |
| 219 relocations_.reserve(128); | 225 relocations_.reserve(128); |
| 220 } | 226 } |
| 221 | 227 |
| 222 | 228 |
| 223 void Assembler::GetCode(CodeDesc* desc) { | 229 void Assembler::GetCode(CodeDesc* desc) { |
| 230 // Emit constant pool if necessary. |
| 231 int offset = EmitConstantPool(); |
| 232 |
| 224 EmitRelocations(); | 233 EmitRelocations(); |
| 225 | 234 |
| 226 // Set up code descriptor. | 235 // Set up code descriptor. |
| 227 desc->buffer = buffer_; | 236 desc->buffer = buffer_; |
| 228 desc->buffer_size = buffer_size_; | 237 desc->buffer_size = buffer_size_; |
| 229 desc->instr_size = pc_offset(); | 238 desc->instr_size = pc_offset(); |
| 230 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); | 239 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); |
| 240 desc->constant_pool_size = (offset ? desc->instr_size - offset : 0); |
| 231 desc->origin = this; | 241 desc->origin = this; |
| 232 } | 242 } |
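
The constant_pool_size arithmetic assumes EmitConstantPool() returns the pc offset at which the pool starts, or 0 when no pool was emitted. A worked example with made-up numbers:

    // Hypothetical layout after assembly (numbers illustrative):
    //   instructions occupy pc offsets [0, 200)
    //   constant pool occupies [200, 264), so EmitConstantPool() returned 200
    //   desc->instr_size         = pc_offset()  = 264
    //   desc->constant_pool_size = 264 - 200    = 64
    // With no pool, offset == 0 and constant_pool_size is reported as 0.
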
| 233 | 243 |
| 234 | 244 |
| 235 void Assembler::Align(int m) { | 245 void Assembler::Align(int m) { |
| 236 #if V8_TARGET_ARCH_PPC64 | 246 #if V8_TARGET_ARCH_PPC64 |
| 237 DCHECK(m >= 4 && base::bits::IsPowerOfTwo64(m)); | 247 DCHECK(m >= 4 && base::bits::IsPowerOfTwo64(m)); |
| 238 #else | 248 #else |
| 239 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m)); | 249 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m)); |
| 240 #endif | 250 #endif |
| 241 while ((pc_offset() & (m - 1)) != 0) { | 251 // First ensure instruction alignment |
| 252 while (pc_offset() & (kInstrSize - 1)) { |
| 253 db(0); |
| 254 } |
| 255 // Then pad to requested alignment with nops |
| 256 while (pc_offset() & (m - 1)) { |
| 242 nop(); | 257 nop(); |
| 243 } | 258 } |
| 244 } | 259 } |
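
The reworked Align() now tolerates a byte-misaligned pc, which can occur after raw data emitted with db(): it first zero-pads to a 4-byte instruction boundary, then emits nops up to the requested power of two. A usage sketch (assm is an illustrative Assembler instance):

    assm.db(0x2a);  // one raw data byte; pc is no longer instruction-aligned
    assm.Align(8);  // db(0) up to a 4-byte boundary, then nop() up to 8
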
| 245 | 260 |
| 246 | 261 |
| 247 void Assembler::CodeTargetAlign() { Align(8); } | 262 void Assembler::CodeTargetAlign() { Align(8); } |
| 248 | 263 |
| 249 | 264 |
| 250 Condition Assembler::GetCondition(Instr instr) { | 265 Condition Assembler::GetCondition(Instr instr) { |
| 251 switch (instr & kCondMask) { | 266 switch (instr & kCondMask) { |
| (...skipping 212 matching lines...) |
| 464 int32_t offset = target_pos + SIGN_EXT_IMM16(operands & kImm16Mask); | 479 int32_t offset = target_pos + SIGN_EXT_IMM16(operands & kImm16Mask); |
| 465 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 2, | 480 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 2, |
| 466 CodePatcher::DONT_FLUSH); | 481 CodePatcher::DONT_FLUSH); |
| 467 patcher.masm()->bitwise_add32(dst, base, offset); | 482 patcher.masm()->bitwise_add32(dst, base, offset); |
| 468 break; | 483 break; |
| 469 } | 484 } |
| 470 case kUnboundMovLabelAddrOpcode: { | 485 case kUnboundMovLabelAddrOpcode: { |
| 471 // Load the address of the label in a register. | 486 // Load the address of the label in a register. |
| 472 Register dst = Register::from_code(instr_at(pos + kInstrSize)); | 487 Register dst = Register::from_code(instr_at(pos + kInstrSize)); |
| 473 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), | 488 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), |
| 474 kMovInstructions, CodePatcher::DONT_FLUSH); | 489 kMovInstructionsNoConstantPool, |
| 490 CodePatcher::DONT_FLUSH); |
| 475 // Keep internal references relative until EmitRelocations. | 491 // Keep internal references relative until EmitRelocations. |
| 476 patcher.masm()->bitwise_mov(dst, target_pos); | 492 patcher.masm()->bitwise_mov(dst, target_pos); |
| 477 break; | 493 break; |
| 478 } | 494 } |
| 479 case kUnboundJumpTableEntryOpcode: { | 495 case kUnboundJumpTableEntryOpcode: { |
| 480 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), | 496 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), |
| 481 kPointerSize / kInstrSize, CodePatcher::DONT_FLUSH); | 497 kPointerSize / kInstrSize, CodePatcher::DONT_FLUSH); |
| 482 // Keep internal references relative until EmitRelocations. | 498 // Keep internal references relative until EmitRelocations. |
| 483 patcher.masm()->emit_ptr(target_pos); | 499 patcher.masm()->dp(target_pos); |
| 484 break; | 500 break; |
| 485 } | 501 } |
| 486 default: | 502 default: |
| 487 DCHECK(false); | 503 DCHECK(false); |
| 488 break; | 504 break; |
| 489 } | 505 } |
| 490 } | 506 } |
| 491 | 507 |
| 492 | 508 |
| 493 int Assembler::max_reach_from(int pos) { | 509 int Assembler::max_reach_from(int pos) { |
| (...skipping 991 matching lines...) |
| 1485 | 1501 |
| 1486 | 1502 |
| 1487 // Function descriptor for AIX. | 1503 // Function descriptor for AIX. |
| 1488 // Code address skips the function descriptor "header". | 1504 // Code address skips the function descriptor "header". |
| 1489 // TOC and static chain are ignored and set to 0. | 1505 // TOC and static chain are ignored and set to 0. |
| 1490 void Assembler::function_descriptor() { | 1506 void Assembler::function_descriptor() { |
| 1491 #if ABI_USES_FUNCTION_DESCRIPTORS | 1507 #if ABI_USES_FUNCTION_DESCRIPTORS |
| 1492 Label instructions; | 1508 Label instructions; |
| 1493 DCHECK(pc_offset() == 0); | 1509 DCHECK(pc_offset() == 0); |
| 1494 emit_label_addr(&instructions); | 1510 emit_label_addr(&instructions); |
| 1495 emit_ptr(0); | 1511 dp(0); |
| 1496 emit_ptr(0); | 1512 dp(0); |
| 1497 bind(&instructions); | 1513 bind(&instructions); |
| 1498 #endif | 1514 #endif |
| 1499 } | 1515 } |
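
On ABIs with function descriptors, the three words emitted above are the descriptor's three pointer-sized slots; conceptually (a sketch, not a type defined in this file):

    struct FunctionDescriptor {
      uintptr_t entry;         // address of the first real instruction
      uintptr_t toc;           // TOC pointer -- emitted as 0 and ignored
      uintptr_t static_chain;  // environment pointer -- emitted as 0
    };

The emit_label_addr/dp(0)/dp(0) triple fills exactly these slots, and bind(&instructions) marks where the real code begins.
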
| 1500 | 1516 |
| 1501 | 1517 |
| 1518 int Assembler::instructions_required_for_mov(Register dst, |
| 1519 const Operand& src) const { |
| 1520 bool canOptimize = |
| 1521 !(src.must_output_reloc_info(this) || is_trampoline_pool_blocked()); |
| 1522 if (use_constant_pool_for_mov(dst, src, canOptimize)) { |
| 1523 if (ConstantPoolAccessIsInOverflow()) { |
| 1524 return kMovInstructionsConstantPool + 1; |
| 1525 } |
| 1526 return kMovInstructionsConstantPool; |
| 1527 } |
| 1528 DCHECK(!canOptimize); |
| 1529 return kMovInstructionsNoConstantPool; |
| 1530 } |
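
These counts should line up with the load sequences mov() emits below; roughly, on PPC64 (the exact constant values are assumptions inferred from their names):

    // kMovInstructionsConstantPool (regular pool entry, presumably 1):
    //   ld    dst, offset(kConstantPoolRegister)
    // kMovInstructionsConstantPool + 1 (overflowed pool entry):
    //   addis dst, kConstantPoolRegister, hi(offset)
    //   ld    dst, lo(offset)(dst)
    // kMovInstructionsNoConstantPool (full patchable immediate, presumably 5):
    //   lis/ori plus shift/oris/ori steps for the upper 32 bits
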
| 1531 |
| 1532 |
| 1533 bool Assembler::use_constant_pool_for_mov(Register dst, const Operand& src, |
| 1534 bool canOptimize) const { |
| 1535 if (!FLAG_enable_embedded_constant_pool || !is_constant_pool_available()) { |
| 1536 // If there is no constant pool available, we must use a mov |
| 1537 // immediate sequence. |
| 1538 return false; |
| 1539 } |
| 1540 |
| 1541 intptr_t value = src.immediate(); |
| 1542 #if V8_TARGET_ARCH_PPC64 |
| 1543 bool allowOverflow = !((canOptimize && is_int32(value)) || dst.is(r0)); |
| 1544 #else |
| 1545 bool allowOverflow = !(canOptimize || dst.is(r0)); |
| 1546 #endif |
| 1547 if (canOptimize && is_int16(value)) { |
| 1548 // Prefer a single-instruction load-immediate. |
| 1549 return false; |
| 1550 } |
| 1551 if (!allowOverflow && ConstantPoolAccessIsInOverflow()) { |
| 1552 // Prefer non-relocatable two-instruction bitwise-mov32 over |
| 1553 // overflow sequence. |
| 1554 return false; |
| 1555 } |
| 1556 |
| 1557 return true; |
| 1558 } |
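
A few representative outcomes, assuming the pool is available (register choices illustrative):

    // mov(r5, Operand(12))           -> is_int16 && canOptimize: single li,
    //                                   no pool entry
    // mov(r5, Operand(handle))       -> relocatable: pool entry preferred
    // mov(r0, Operand(big_constant)) -> overflow access disallowed, because
    //                                   the overflow sequence reloads through
    //                                   dst and an RA field of 0 encodes a
    //                                   zero base, not r0; with an overflowed
    //                                   pool this falls back to the fixed
    //                                   mov sequence
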
| 1559 |
| 1560 |
| 1502 void Assembler::EnsureSpaceFor(int space_needed) { | 1561 void Assembler::EnsureSpaceFor(int space_needed) { |
| 1503 if (buffer_space() <= (kGap + space_needed)) { | 1562 if (buffer_space() <= (kGap + space_needed)) { |
| 1504 GrowBuffer(space_needed); | 1563 GrowBuffer(space_needed); |
| 1505 } | 1564 } |
| 1506 } | 1565 } |
| 1507 | 1566 |
| 1508 | 1567 |
| 1509 bool Operand::must_output_reloc_info(const Assembler* assembler) const { | 1568 bool Operand::must_output_reloc_info(const Assembler* assembler) const { |
| 1510 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { | 1569 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { |
| 1511 if (assembler != NULL && assembler->predictable_code_size()) return true; | 1570 if (assembler != NULL && assembler->predictable_code_size()) return true; |
| (...skipping 12 matching lines...) |
| 1524 // Todo - break this dependency so we can optimize mov() in general | 1583 // Todo - break this dependency so we can optimize mov() in general |
| 1525 // and only use the generic version when we require a fixed sequence | 1584 // and only use the generic version when we require a fixed sequence |
| 1526 void Assembler::mov(Register dst, const Operand& src) { | 1585 void Assembler::mov(Register dst, const Operand& src) { |
| 1527 intptr_t value = src.immediate(); | 1586 intptr_t value = src.immediate(); |
| 1528 bool relocatable = src.must_output_reloc_info(this); | 1587 bool relocatable = src.must_output_reloc_info(this); |
| 1529 bool canOptimize; | 1588 bool canOptimize; |
| 1530 | 1589 |
| 1531 canOptimize = | 1590 canOptimize = |
| 1532 !(relocatable || (is_trampoline_pool_blocked() && !is_int16(value))); | 1591 !(relocatable || (is_trampoline_pool_blocked() && !is_int16(value))); |
| 1533 | 1592 |
| 1593 if (use_constant_pool_for_mov(dst, src, canOptimize)) { |
| 1594 DCHECK(is_constant_pool_available()); |
| 1595 if (relocatable) { |
| 1596 RecordRelocInfo(src.rmode_); |
| 1597 } |
| 1598 ConstantPoolEntry::Access access = ConstantPoolAddEntry(src.rmode_, value); |
| 1599 #if V8_TARGET_ARCH_PPC64 |
| 1600 if (access == ConstantPoolEntry::OVERFLOWED) { |
| 1601 addis(dst, kConstantPoolRegister, Operand::Zero()); |
| 1602 ld(dst, MemOperand(dst, 0)); |
| 1603 } else { |
| 1604 ld(dst, MemOperand(kConstantPoolRegister, 0)); |
| 1605 } |
| 1606 #else |
| 1607 if (access == ConstantPoolEntry::OVERFLOWED) { |
| 1608 addis(dst, kConstantPoolRegister, Operand::Zero()); |
| 1609 lwz(dst, MemOperand(dst, 0)); |
| 1610 } else { |
| 1611 lwz(dst, MemOperand(kConstantPoolRegister, 0)); |
| 1612 } |
| 1613 #endif |
| 1614 return; |
| 1615 } |
| 1616 |
| 1534 if (canOptimize) { | 1617 if (canOptimize) { |
| 1535 if (is_int16(value)) { | 1618 if (is_int16(value)) { |
| 1536 li(dst, Operand(value)); | 1619 li(dst, Operand(value)); |
| 1537 } else { | 1620 } else { |
| 1538 uint16_t u16; | 1621 uint16_t u16; |
| 1539 #if V8_TARGET_ARCH_PPC64 | 1622 #if V8_TARGET_ARCH_PPC64 |
| 1540 if (is_int32(value)) { | 1623 if (is_int32(value)) { |
| 1541 #endif | 1624 #endif |
| 1542 lis(dst, Operand(value >> 16)); | 1625 lis(dst, Operand(value >> 16)); |
| 1543 #if V8_TARGET_ARCH_PPC64 | 1626 #if V8_TARGET_ARCH_PPC64 |
| (...skipping 145 matching lines...) |
| 1689 DCHECK(is_int26(link)); | 1772 DCHECK(is_int26(link)); |
| 1690 | 1773 |
| 1691 // When the label is bound, these instructions will be patched | 1774 // When the label is bound, these instructions will be patched |
| 1692 // with a multi-instruction mov sequence that will load the | 1775 // with a multi-instruction mov sequence that will load the |
| 1693 // destination register with the address of the label. | 1776 // destination register with the address of the label. |
| 1694 // | 1777 // |
| 1695 // target_at extracts the link and target_at_put patches the instructions. | 1778 // target_at extracts the link and target_at_put patches the instructions. |
| 1696 BlockTrampolinePoolScope block_trampoline_pool(this); | 1779 BlockTrampolinePoolScope block_trampoline_pool(this); |
| 1697 emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask)); | 1780 emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask)); |
| 1698 emit(dst.code()); | 1781 emit(dst.code()); |
| 1699 DCHECK(kMovInstructions >= 2); | 1782 DCHECK(kMovInstructionsNoConstantPool >= 2); |
| 1700 for (int i = 0; i < kMovInstructions - 2; i++) nop(); | 1783 for (int i = 0; i < kMovInstructionsNoConstantPool - 2; i++) nop(); |
| 1701 } | 1784 } |
| 1702 } | 1785 } |
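
For an unbound label, the words emitted above form a placeholder that target_at() and target_at_put() understand; schematically:

    // word 0: kUnboundMovLabelAddrOpcode | (link & kImm26Mask) -- chain link
    // word 1: dst.code()                                       -- destination
    // words 2 .. kMovInstructionsNoConstantPool-1: nop()       -- patch space
    // Once the label binds, the window is rewritten via bitwise_mov().
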
| 1703 | 1786 |
| 1704 | 1787 |
| 1705 void Assembler::emit_label_addr(Label* label) { | 1788 void Assembler::emit_label_addr(Label* label) { |
| 1706 CheckBuffer(); | 1789 CheckBuffer(); |
| 1707 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); | 1790 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); |
| 1708 int position = link(label); | 1791 int position = link(label); |
| 1709 if (label->is_bound()) { | 1792 if (label->is_bound()) { |
| 1710 // Keep internal references relative until EmitRelocations. | 1793 // Keep internal references relative until EmitRelocations. |
| 1711 emit_ptr(position); | 1794 dp(position); |
| 1712 } else { | 1795 } else { |
| 1713 // Encode internal reference to unbound label. We use a dummy opcode | 1796 // Encode internal reference to unbound label. We use a dummy opcode |
| 1714 // such that it won't collide with any opcode that might appear in the | 1797 // such that it won't collide with any opcode that might appear in the |
| 1715 // label's chain. | 1798 // label's chain. |
| 1716 int link = position - pc_offset(); | 1799 int link = position - pc_offset(); |
| 1717 DCHECK_EQ(0, link & 3); | 1800 DCHECK_EQ(0, link & 3); |
| 1718 link >>= 2; | 1801 link >>= 2; |
| 1719 DCHECK(is_int26(link)); | 1802 DCHECK(is_int26(link)); |
| 1720 | 1803 |
| 1721 // When the label is bound, the instruction(s) will be patched | 1804 // When the label is bound, the instruction(s) will be patched |
| (...skipping 110 matching lines...) |
| 1832 | 1915 |
| 1833 | 1916 |
| 1834 void Assembler::isync() { emit(EXT1 | ISYNC); } | 1917 void Assembler::isync() { emit(EXT1 | ISYNC); } |
| 1835 | 1918 |
| 1836 | 1919 |
| 1837 // Floating point support | 1920 // Floating point support |
| 1838 | 1921 |
| 1839 void Assembler::lfd(const DoubleRegister frt, const MemOperand& src) { | 1922 void Assembler::lfd(const DoubleRegister frt, const MemOperand& src) { |
| 1840 int offset = src.offset(); | 1923 int offset = src.offset(); |
| 1841 Register ra = src.ra(); | 1924 Register ra = src.ra(); |
| 1925 DCHECK(!ra.is(r0)); |
| 1842 DCHECK(is_int16(offset)); | 1926 DCHECK(is_int16(offset)); |
| 1843 int imm16 = offset & kImm16Mask; | 1927 int imm16 = offset & kImm16Mask; |
| 1844 // could be x_form instruction with some casting magic | 1928 // could be x_form instruction with some casting magic |
| 1845 emit(LFD | frt.code() * B21 | ra.code() * B16 | imm16); | 1929 emit(LFD | frt.code() * B21 | ra.code() * B16 | imm16); |
| 1846 } | 1930 } |
| 1847 | 1931 |
| 1848 | 1932 |
| 1849 void Assembler::lfdu(const DoubleRegister frt, const MemOperand& src) { | 1933 void Assembler::lfdu(const DoubleRegister frt, const MemOperand& src) { |
| 1850 int offset = src.offset(); | 1934 int offset = src.offset(); |
| 1851 Register ra = src.ra(); | 1935 Register ra = src.ra(); |
| 1936 DCHECK(!ra.is(r0)); |
| 1852 DCHECK(is_int16(offset)); | 1937 DCHECK(is_int16(offset)); |
| 1853 int imm16 = offset & kImm16Mask; | 1938 int imm16 = offset & kImm16Mask; |
| 1854 // could be x_form instruction with some casting magic | 1939 // could be x_form instruction with some casting magic |
| 1855 emit(LFDU | frt.code() * B21 | ra.code() * B16 | imm16); | 1940 emit(LFDU | frt.code() * B21 | ra.code() * B16 | imm16); |
| 1856 } | 1941 } |
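
The new DCHECK(!ra.is(r0)) in these D-form loads reflects a PowerPC encoding rule: an RA field of 0 means a base of literal zero, not the contents of r0. Illustratively:

    // lfd(d1, MemOperand(r0, 8));  // would encode an absolute load from
    //                              // address 8, not from r0 + 8 -- rejected
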
| 1857 | 1942 |
| 1858 | 1943 |
| 1859 void Assembler::lfdx(const DoubleRegister frt, const MemOperand& src) { | 1944 void Assembler::lfdx(const DoubleRegister frt, const MemOperand& src) { |
| 1860 Register ra = src.ra(); | 1945 Register ra = src.ra(); |
| 1861 Register rb = src.rb(); | 1946 Register rb = src.rb(); |
| (...skipping 379 matching lines...) |
| 2241 } | 2326 } |
| 2242 | 2327 |
| 2243 | 2328 |
| 2244 void Assembler::dd(uint32_t data) { | 2329 void Assembler::dd(uint32_t data) { |
| 2245 CheckBuffer(); | 2330 CheckBuffer(); |
| 2246 *reinterpret_cast<uint32_t*>(pc_) = data; | 2331 *reinterpret_cast<uint32_t*>(pc_) = data; |
| 2247 pc_ += sizeof(uint32_t); | 2332 pc_ += sizeof(uint32_t); |
| 2248 } | 2333 } |
| 2249 | 2334 |
| 2250 | 2335 |
| 2251 void Assembler::emit_ptr(intptr_t data) { | 2336 void Assembler::dq(uint64_t value) { |
| 2252 CheckBuffer(); | 2337 CheckBuffer(); |
| 2253 *reinterpret_cast<intptr_t*>(pc_) = data; | 2338 *reinterpret_cast<uint64_t*>(pc_) = value; |
| 2254 pc_ += sizeof(intptr_t); | 2339 pc_ += sizeof(uint64_t); |
| 2255 } | 2340 } |
| 2256 | 2341 |
| 2257 | 2342 |
| 2258 void Assembler::emit_double(double value) { | 2343 void Assembler::dp(uintptr_t data) { |
| 2259 CheckBuffer(); | 2344 CheckBuffer(); |
| 2260 *reinterpret_cast<double*>(pc_) = value; | 2345 *reinterpret_cast<uintptr_t*>(pc_) = data; |
| 2261 pc_ += sizeof(double); | 2346 pc_ += sizeof(uintptr_t); |
| 2262 } | 2347 } |
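
The renamed data directives now follow the common dd/dq/dp assembler naming for 32-bit, 64-bit, and pointer-sized values; a usage sketch (assm is an illustrative Assembler instance):

    assm.dd(0xdeadbeef);             // emit 4 bytes
    assm.dq(0x0123456789abcdefULL);  // emit 8 bytes
    assm.dp(0);                      // emit one pointer-sized word (4 or 8)
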
| 2263 | 2348 |
| 2264 | 2349 |
| 2265 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { | 2350 void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) { |
| 2351 if (RelocInfo::IsNone(rmode) || |
| 2352 // Don't record external references unless the heap will be serialized. |
| 2353 (rmode == RelocInfo::EXTERNAL_REFERENCE && !serializer_enabled() && |
| 2354 !emit_debug_code())) { |
| 2355 return; |
| 2356 } |
| 2357 if (rmode == RelocInfo::CODE_TARGET_WITH_ID) { |
| 2358 data = RecordedAstId().ToInt(); |
| 2359 ClearRecordedAstId(); |
| 2360 } |
| 2266 DeferredRelocInfo rinfo(pc_offset(), rmode, data); | 2361 DeferredRelocInfo rinfo(pc_offset(), rmode, data); |
| 2267 RecordRelocInfo(rinfo); | 2362 relocations_.push_back(rinfo); |
| 2268 } | 2363 } |
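
The consolidated filter preserves the old behavior in a single early-return path; as a summary of the branches above:

    // RelocInfo::NONE                          -> dropped
    // EXTERNAL_REFERENCE while not serializing
    //   and not emitting debug code            -> dropped
    // CODE_TARGET_WITH_ID                      -> data := RecordedAstId(),
    //                                             recorded id cleared, queued
    // anything else                            -> queued as DeferredRelocInfo
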
| 2269 | 2364 |
| 2270 | 2365 |
| 2271 void Assembler::RecordRelocInfo(const DeferredRelocInfo& rinfo) { | |
| 2272 if (rinfo.rmode() >= RelocInfo::JS_RETURN && | |
| 2273 rinfo.rmode() <= RelocInfo::DEBUG_BREAK_SLOT) { | |
| 2274 // Adjust code for new modes. | |
| 2275 DCHECK(RelocInfo::IsDebugBreakSlot(rinfo.rmode()) || | |
| 2276 RelocInfo::IsJSReturn(rinfo.rmode()) || | |
| 2277 RelocInfo::IsComment(rinfo.rmode()) || | |
| 2278 RelocInfo::IsPosition(rinfo.rmode())); | |
| 2279 } | |
| 2280 if (!RelocInfo::IsNone(rinfo.rmode())) { | |
| 2281 // Don't record external references unless the heap will be serialized. | |
| 2282 if (rinfo.rmode() == RelocInfo::EXTERNAL_REFERENCE) { | |
| 2283 if (!serializer_enabled() && !emit_debug_code()) { | |
| 2284 return; | |
| 2285 } | |
| 2286 } | |
| 2287 if (rinfo.rmode() == RelocInfo::CODE_TARGET_WITH_ID) { | |
| 2288 DeferredRelocInfo reloc_info_with_ast_id(rinfo.position(), rinfo.rmode(), | |
| 2289 RecordedAstId().ToInt()); | |
| 2290 ClearRecordedAstId(); | |
| 2291 relocations_.push_back(reloc_info_with_ast_id); | |
| 2292 } else { | |
| 2293 relocations_.push_back(rinfo); | |
| 2294 } | |
| 2295 } | |
| 2296 } | |
| 2297 | |
| 2298 | |
| 2299 void Assembler::EmitRelocations() { | 2366 void Assembler::EmitRelocations() { |
| 2300 EnsureSpaceFor(relocations_.size() * kMaxRelocSize); | 2367 EnsureSpaceFor(relocations_.size() * kMaxRelocSize); |
| 2301 | 2368 |
| 2302 for (std::vector<DeferredRelocInfo>::iterator it = relocations_.begin(); | 2369 for (std::vector<DeferredRelocInfo>::iterator it = relocations_.begin(); |
| 2303 it != relocations_.end(); it++) { | 2370 it != relocations_.end(); it++) { |
| 2304 RelocInfo::Mode rmode = it->rmode(); | 2371 RelocInfo::Mode rmode = it->rmode(); |
| 2305 Address pc = buffer_ + it->position(); | 2372 Address pc = buffer_ + it->position(); |
| 2306 Code* code = NULL; | 2373 Code* code = NULL; |
| 2307 RelocInfo rinfo(pc, rmode, it->data(), code); | 2374 RelocInfo rinfo(pc, rmode, it->data(), code); |
| 2308 | 2375 |
| (...skipping 60 matching lines...) |
| 2369 next_buffer_check_ = kMaxInt; | 2436 next_buffer_check_ = kMaxInt; |
| 2370 } | 2437 } |
| 2371 } else { | 2438 } else { |
| 2372 // Number of branches to unbound label at this point is zero, so we can | 2439 // Number of branches to unbound label at this point is zero, so we can |
| 2373 // move next buffer check to maximum. | 2440 // move next buffer check to maximum. |
| 2374 next_buffer_check_ = | 2441 next_buffer_check_ = |
| 2375 pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize; | 2442 pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize; |
| 2376 } | 2443 } |
| 2377 return; | 2444 return; |
| 2378 } | 2445 } |
| 2379 | |
| 2380 | |
| 2381 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { | |
| 2382 DCHECK(!FLAG_enable_ool_constant_pool); | |
| 2383 return isolate->factory()->empty_constant_pool_array(); | |
| 2384 } | |
| 2385 | |
| 2386 | |
| 2387 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { | |
| 2388 DCHECK(!FLAG_enable_ool_constant_pool); | |
| 2389 } | |
| 2390 } | 2446 } |
| 2391 } // namespace v8::internal | 2447 } // namespace v8::internal |
| 2392 | 2448 |
| 2393 #endif // V8_TARGET_ARCH_PPC | 2449 #endif // V8_TARGET_ARCH_PPC |