Chromium Code Reviews

Side by Side Diff: src/ppc/assembler-ppc.cc

Issue 1030353003: Enable constant pool support. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 9 months ago
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. 1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved. 2 // All Rights Reserved.
3 // 3 //
4 // Redistribution and use in source and binary forms, with or without 4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions 5 // modification, are permitted provided that the following conditions
6 // are met: 6 // are met:
7 // 7 //
8 // - Redistributions of source code must retain the above copyright notice, 8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer. 9 // this list of conditions and the following disclaimer.
10 // 10 //
(...skipping 131 matching lines...)
142 // ----------------------------------------------------------------------------- 142 // -----------------------------------------------------------------------------
143 // Implementation of RelocInfo 143 // Implementation of RelocInfo
144 144
145 const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE | 145 const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE |
146 1 << RelocInfo::INTERNAL_REFERENCE_ENCODED; 146 1 << RelocInfo::INTERNAL_REFERENCE_ENCODED;
147 147
148 148
149 bool RelocInfo::IsCodedSpecially() { 149 bool RelocInfo::IsCodedSpecially() {
150 // The deserializer needs to know whether a pointer is specially 150 // The deserializer needs to know whether a pointer is specially
151 // coded. Being specially coded on PPC means that it is a lis/ori 151 // coded. Being specially coded on PPC means that it is a lis/ori
152 // instruction sequence, and these are always the case inside code 152 // instruction sequence or is an out of line constant pool entry,
153 // objects. 153 // and these are always the case inside code objects.
154 return true; 154 return true;
155 } 155 }
156 156
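The "specially coded" form mentioned in the updated comment is the full-width lis/ori immediate materialization (extended on PPC64, or an out-of-line constant pool load with this patch). A minimal sketch of what the 32-bit pair computes, with hypothetical names and values, not V8's code:

    #include <cstdint>

    // lis rD, hi sets rD = hi << 16; ori rD, rD, lo then ors in the
    // low halfword.
    static int32_t LisOriValue(int16_t hi, uint16_t lo) {
      return (static_cast<int32_t>(hi) << 16) | lo;
    }
    // LisOriValue(0x1234, 0x5678) == 0x12345678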
157 157
158 bool RelocInfo::IsInConstantPool() { 158 bool RelocInfo::IsInConstantPool() {
159 if (FLAG_enable_embedded_constant_pool) {
160 Address constant_pool = host_->constant_pool();
161 return (constant_pool &&
162 (pc_ >= constant_pool || Assembler::IsConstantPoolLoadStart(pc_)));
163 }
159 return false; 164 return false;
160 } 165 }
161 166
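A minimal sketch of the containment test added above, with stand-in types (V8's Address and Assembler::IsConstantPoolLoadStart are not reproduced here): an address counts as "in" the pool if the code object has a pool and pc either lies at or past the pool's start or is the first instruction of a constant pool load sequence.

    #include <cstdint>

    using Address = uintptr_t;

    // Hypothetical stand-in; in V8 this inspects the instruction at pc.
    static bool IsConstantPoolLoadStart(Address /*pc*/) { return false; }

    static bool InConstantPool(Address constant_pool, Address pc) {
      // A null pool address means no pool was emitted for this code object.
      return constant_pool != 0 &&
             (pc >= constant_pool || IsConstantPoolLoadStart(pc));
    }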
162 167
163 // ----------------------------------------------------------------------------- 168 // -----------------------------------------------------------------------------
164 // Implementation of Operand and MemOperand 169 // Implementation of Operand and MemOperand
165 // See assembler-ppc-inl.h for inlined constructors 170 // See assembler-ppc-inl.h for inlined constructors
166 171
167 Operand::Operand(Handle<Object> handle) { 172 Operand::Operand(Handle<Object> handle) {
168 AllowDeferredHandleDereference using_raw_address; 173 AllowDeferredHandleDereference using_raw_address;
(...skipping 26 matching lines...)
195 } 200 }
196 201
197 202
198 // ----------------------------------------------------------------------------- 203 // -----------------------------------------------------------------------------
199 // Specific instructions, constants, and masks. 204 // Specific instructions, constants, and masks.
200 205
201 206
202 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size) 207 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
203 : AssemblerBase(isolate, buffer, buffer_size), 208 : AssemblerBase(isolate, buffer, buffer_size),
204 recorded_ast_id_(TypeFeedbackId::None()), 209 recorded_ast_id_(TypeFeedbackId::None()),
210 constant_pool_builder_(),
205 positions_recorder_(this) { 211 positions_recorder_(this) {
206 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_); 212 reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
207 213
208 no_trampoline_pool_before_ = 0; 214 no_trampoline_pool_before_ = 0;
209 trampoline_pool_blocked_nesting_ = 0; 215 trampoline_pool_blocked_nesting_ = 0;
216 constant_pool_entry_sharing_blocked_nesting_ = 0;
210 // We leave space (kMaxBlockTrampolineSectionSize) 217 // We leave space (kMaxBlockTrampolineSectionSize)
211 // for BlockTrampolinePoolScope buffer. 218 // for BlockTrampolinePoolScope buffer.
212 next_buffer_check_ = 219 next_buffer_check_ =
213 FLAG_force_long_branches ? kMaxInt : kMaxCondBranchReach - 220 FLAG_force_long_branches ? kMaxInt : kMaxCondBranchReach -
214 kMaxBlockTrampolineSectionSize; 221 kMaxBlockTrampolineSectionSize;
215 internal_trampoline_exception_ = false; 222 internal_trampoline_exception_ = false;
216 last_bound_pos_ = 0; 223 last_bound_pos_ = 0;
217 trampoline_emitted_ = FLAG_force_long_branches; 224 trampoline_emitted_ = FLAG_force_long_branches;
218 unbound_labels_count_ = 0; 225 unbound_labels_count_ = 0;
219 ClearRecordedAstId(); 226 ClearRecordedAstId();
220 relocations_.reserve(128); 227 relocations_.reserve(128);
221 } 228 }
222 229
223 230
224 void Assembler::GetCode(CodeDesc* desc) { 231 void Assembler::GetCode(CodeDesc* desc) {
232 // Emit constant pool if necessary.
233 int offset = EmitConstantPool();
234
225 EmitRelocations(); 235 EmitRelocations();
226 236
227 // Set up code descriptor. 237 // Set up code descriptor.
228 desc->buffer = buffer_; 238 desc->buffer = buffer_;
229 desc->buffer_size = buffer_size_; 239 desc->buffer_size = buffer_size_;
230 desc->instr_size = pc_offset(); 240 desc->instr_size = pc_offset();
231 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos(); 241 desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
242 desc->constant_pool_size = (offset ? desc->instr_size - offset : 0);
232 desc->origin = this; 243 desc->origin = this;
233 } 244 }
234 245
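The constant_pool_size computation above follows from the pool being emitted at the tail of the instruction stream: EmitConstantPool() returns the pc offset where the pool begins (zero meaning no pool), so the pool's size is whatever remains up to instr_size. A sketch of the arithmetic:

    // Sketch of desc->constant_pool_size above.
    static int ConstantPoolSize(int instr_size, int pool_offset) {
      return pool_offset ? instr_size - pool_offset : 0;
    }
    // e.g. instr_size == 256 with the pool starting at offset 224
    // gives 32 bytes of pool data.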
235 246
236 void Assembler::Align(int m) { 247 void Assembler::Align(int m) {
237 #if V8_TARGET_ARCH_PPC64 248 #if V8_TARGET_ARCH_PPC64
238 DCHECK(m >= 4 && base::bits::IsPowerOfTwo64(m)); 249 DCHECK(m >= 4 && base::bits::IsPowerOfTwo64(m));
239 #else 250 #else
240 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m)); 251 DCHECK(m >= 4 && base::bits::IsPowerOfTwo32(m));
241 #endif 252 #endif
(...skipping 223 matching lines...)
465 int32_t offset = target_pos + SIGN_EXT_IMM16(operands & kImm16Mask); 476 int32_t offset = target_pos + SIGN_EXT_IMM16(operands & kImm16Mask);
466 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 2, 477 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 2,
467 CodePatcher::DONT_FLUSH); 478 CodePatcher::DONT_FLUSH);
468 patcher.masm()->bitwise_add32(dst, base, offset); 479 patcher.masm()->bitwise_add32(dst, base, offset);
469 break; 480 break;
470 } 481 }
471 case kUnboundMovLabelAddrOpcode: { 482 case kUnboundMovLabelAddrOpcode: {
472 // Load the address of the label in a register. 483 // Load the address of the label in a register.
473 Register dst = Register::from_code(instr_at(pos + kInstrSize)); 484 Register dst = Register::from_code(instr_at(pos + kInstrSize));
474 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 485 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos),
475 kMovInstructions, CodePatcher::DONT_FLUSH); 486 kMovInstructionsNoConstantPool,
487 CodePatcher::DONT_FLUSH);
476 // Keep internal references relative until EmitRelocations. 488 // Keep internal references relative until EmitRelocations.
477 patcher.masm()->bitwise_mov(dst, target_pos); 489 patcher.masm()->bitwise_mov(dst, target_pos);
478 break; 490 break;
479 } 491 }
480 case kUnboundJumpTableEntryOpcode: { 492 case kUnboundJumpTableEntryOpcode: {
481 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos), 493 CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos),
482 kPointerSize / kInstrSize, CodePatcher::DONT_FLUSH); 494 kPointerSize / kInstrSize, CodePatcher::DONT_FLUSH);
483 // Keep internal references relative until EmitRelocations. 495 // Keep internal references relative until EmitRelocations.
484 patcher.masm()->emit_ptr(target_pos); 496 patcher.masm()->emit_ptr(target_pos);
485 break; 497 break;
(...skipping 1007 matching lines...)
1493 Label instructions; 1505 Label instructions;
1494 DCHECK(pc_offset() == 0); 1506 DCHECK(pc_offset() == 0);
1495 emit_label_addr(&instructions); 1507 emit_label_addr(&instructions);
1496 emit_ptr(0); 1508 emit_ptr(0);
1497 emit_ptr(0); 1509 emit_ptr(0);
1498 bind(&instructions); 1510 bind(&instructions);
1499 #endif 1511 #endif
1500 } 1512 }
1501 1513
1502 1514
1515 int Assembler::instructions_required_for_mov(const Operand& x) const {
1516 bool canOptimize =
1517 !(x.must_output_reloc_info(this) || is_trampoline_pool_blocked());
1518 if (use_constant_pool_for_mov(x, canOptimize)) {
1519 // Current usage guarantees that all constant pool references can
1520 // use the same sequence.
1521 return kMovInstructionsConstantPool;
1522 }
1523 DCHECK(!canOptimize);
1524 return kMovInstructionsNoConstantPool;
1525 }
1526
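The cost model above is a two-way choice between fixed-length sequences. The numeric values below are assumptions for illustration only (the real kMovInstructionsConstantPool / kMovInstructionsNoConstantPool constants are defined elsewhere in this patch):

    // Sketch: a pool-based mov is a single load; the worst-case immediate
    // materialization is the full lis/ori chain (longer on PPC64).
    static int InstructionsRequiredForMov(bool uses_constant_pool) {
      const int kMovInstructionsConstantPool = 1;    // assumed value
      const int kMovInstructionsNoConstantPool = 5;  // assumed PPC64 value
      return uses_constant_pool ? kMovInstructionsConstantPool
                                : kMovInstructionsNoConstantPool;
    }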
1527
1528 bool Assembler::use_constant_pool_for_mov(const Operand& x,
1529 bool canOptimize) const {
1530 if (!FLAG_enable_embedded_constant_pool || !is_constant_pool_available() ||
1531 is_constant_pool_full()) {
1532 // If there is no constant pool available, we must use a mov
1533 // immediate sequence.
1534 return false;
1535 }
1536
1537 intptr_t value = x.immediate();
1538 if (canOptimize && is_int16(value)) {
1539 // Prefer a single-instruction load-immediate.
1540 return false;
1541 }
1542
1543 return true;
1544 }
1545
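The decision above reduces to two early-outs. A self-contained sketch with the assembler's state queries flattened into parameters (names hypothetical):

    #include <cstdint>
    #include <limits>

    static bool FitsInt16(intptr_t v) {
      return v >= std::numeric_limits<int16_t>::min() &&
             v <= std::numeric_limits<int16_t>::max();
    }

    static bool UseConstantPoolForMov(bool pool_available, bool pool_full,
                                      bool can_optimize, intptr_t value) {
      // No pool available: must fall back to a mov-immediate sequence.
      if (!pool_available || pool_full) return false;
      // A 16-bit immediate loads in one li; the pool cannot beat that.
      if (can_optimize && FitsInt16(value)) return false;
      return true;
    }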
1546
1503 void Assembler::EnsureSpaceFor(int space_needed) { 1547 void Assembler::EnsureSpaceFor(int space_needed) {
1504 if (buffer_space() <= (kGap + space_needed)) { 1548 if (buffer_space() <= (kGap + space_needed)) {
1505 GrowBuffer(space_needed); 1549 GrowBuffer(space_needed);
1506 } 1550 }
1507 } 1551 }
1508 1552
1509 1553
1510 bool Operand::must_output_reloc_info(const Assembler* assembler) const { 1554 bool Operand::must_output_reloc_info(const Assembler* assembler) const {
1511 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) { 1555 if (rmode_ == RelocInfo::EXTERNAL_REFERENCE) {
1512 if (assembler != NULL && assembler->predictable_code_size()) return true; 1556 if (assembler != NULL && assembler->predictable_code_size()) return true;
(...skipping 12 matching lines...)
1525 // Todo - break this dependency so we can optimize mov() in general 1569 // Todo - break this dependency so we can optimize mov() in general
1526 // and only use the generic version when we require a fixed sequence 1570 // and only use the generic version when we require a fixed sequence
1527 void Assembler::mov(Register dst, const Operand& src) { 1571 void Assembler::mov(Register dst, const Operand& src) {
1528 intptr_t value = src.immediate(); 1572 intptr_t value = src.immediate();
1529 bool relocatable = src.must_output_reloc_info(this); 1573 bool relocatable = src.must_output_reloc_info(this);
1530 bool canOptimize; 1574 bool canOptimize;
1531 1575
1532 canOptimize = 1576 canOptimize =
1533 !(relocatable || (is_trampoline_pool_blocked() && !is_int16(value))); 1577 !(relocatable || (is_trampoline_pool_blocked() && !is_int16(value)));
1534 1578
1579 if (use_constant_pool_for_mov(src, canOptimize)) {
1580 DCHECK(is_constant_pool_available());
1581 if (relocatable) {
1582 RecordRelocInfo(src.rmode_);
1583 }
1584 ConstantPoolAddEntry(src.rmode_, value);
1585 #if V8_TARGET_ARCH_PPC64
1586 ld(dst, MemOperand(kConstantPoolRegister, 0));
1587 #else
1588 lwz(dst, MemOperand(kConstantPoolRegister, 0));
1589 #endif
1590 return;
1591 }
1592
1535 if (canOptimize) { 1593 if (canOptimize) {
1536 if (is_int16(value)) { 1594 if (is_int16(value)) {
1537 li(dst, Operand(value)); 1595 li(dst, Operand(value));
1538 } else { 1596 } else {
1539 uint16_t u16; 1597 uint16_t u16;
1540 #if V8_TARGET_ARCH_PPC64 1598 #if V8_TARGET_ARCH_PPC64
1541 if (is_int32(value)) { 1599 if (is_int32(value)) {
1542 #endif 1600 #endif
1543 lis(dst, Operand(value >> 16)); 1601 lis(dst, Operand(value >> 16));
1544 #if V8_TARGET_ARCH_PPC64 1602 #if V8_TARGET_ARCH_PPC64
(...skipping 121 matching lines...)
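In the constant pool path added to mov() above, the ld/lwz from kConstantPoolRegister is emitted with a zero displacement and fixed up later, once ConstantPoolBuilder knows the entry's offset from the pool base (the SetConstantPoolOffset call in EmitGroup below). A sketch of that patch step, using the simple D-field layout of lwz for illustration (PPC64's ld uses a DS-field whose low two bits belong to the opcode, so the real code differs slightly); the helper name is hypothetical:

    #include <cstdint>

    static uint32_t PatchLoadOffset(uint32_t instr, int16_t offset) {
      const uint32_t kImm16Mask = 0xffff;
      // Replace the low 16 bits (the displacement) with the resolved offset.
      return (instr & ~kImm16Mask) |
             static_cast<uint32_t>(static_cast<uint16_t>(offset));
    }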
1666 DCHECK(is_int26(link)); 1724 DCHECK(is_int26(link));
1667 DCHECK(is_int16(delta)); 1725 DCHECK(is_int16(delta));
1668 1726
1669 BlockTrampolinePoolScope block_trampoline_pool(this); 1727 BlockTrampolinePoolScope block_trampoline_pool(this);
1670 emit(kUnboundAddLabelOffsetOpcode | (link & kImm26Mask)); 1728 emit(kUnboundAddLabelOffsetOpcode | (link & kImm26Mask));
1671 emit(dst.code() * B21 | base.code() * B16 | (delta & kImm16Mask)); 1729 emit(dst.code() * B21 | base.code() * B16 | (delta & kImm16Mask));
1672 } 1730 }
1673 } 1731 }
1674 1732
1675 1733
1734 // TODO(mbrandy): allow loading internal reference from constant pool
1676 void Assembler::mov_label_addr(Register dst, Label* label) { 1735 void Assembler::mov_label_addr(Register dst, Label* label) {
1677 CheckBuffer(); 1736 CheckBuffer();
1678 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED); 1737 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
1679 int position = link(label); 1738 int position = link(label);
1680 if (label->is_bound()) { 1739 if (label->is_bound()) {
1681 // Keep internal references relative until EmitRelocations. 1740 // Keep internal references relative until EmitRelocations.
1682 bitwise_mov(dst, position); 1741 bitwise_mov(dst, position);
1683 } else { 1742 } else {
1684 // Encode internal reference to unbound label. We use a dummy opcode 1743 // Encode internal reference to unbound label. We use a dummy opcode
1685 // such that it won't collide with any opcode that might appear in the 1744 // such that it won't collide with any opcode that might appear in the
1686 // label's chain. Encode the destination register in the 2nd instruction. 1745 // label's chain. Encode the destination register in the 2nd instruction.
1687 int link = position - pc_offset(); 1746 int link = position - pc_offset();
1688 DCHECK_EQ(0, link & 3); 1747 DCHECK_EQ(0, link & 3);
1689 link >>= 2; 1748 link >>= 2;
1690 DCHECK(is_int26(link)); 1749 DCHECK(is_int26(link));
1691 1750
1692 // When the label is bound, these instructions will be patched 1751 // When the label is bound, these instructions will be patched
1693 // with a multi-instruction mov sequence that will load the 1752 // with a multi-instruction mov sequence that will load the
1694 // destination register with the address of the label. 1753 // destination register with the address of the label.
1695 // 1754 //
1696 // target_at extracts the link and target_at_put patches the instructions. 1755 // target_at extracts the link and target_at_put patches the instructions.
1697 BlockTrampolinePoolScope block_trampoline_pool(this); 1756 BlockTrampolinePoolScope block_trampoline_pool(this);
1698 emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask)); 1757 emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask));
1699 emit(dst.code()); 1758 emit(dst.code());
1700 DCHECK(kMovInstructions >= 2); 1759 DCHECK(kMovInstructionsNoConstantPool >= 2);
1701 for (int i = 0; i < kMovInstructions - 2; i++) nop(); 1760 for (int i = 0; i < kMovInstructionsNoConstantPool - 2; i++) nop();
1702 } 1761 }
1703 } 1762 }
1704 1763
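The unbound-label case above packs everything the later patch needs into a fixed-length window: the first word is a dummy opcode plus the 26-bit scaled link to the previous reference in the label's chain, the second word carries the destination register, and nops pad out to the full mov sequence length so target_at_put can overwrite the window in place. A sketch of the encoding (constants illustrative, not V8's code):

    #include <cstdint>

    static void EncodeUnboundMovLabelAddr(uint32_t* out, uint32_t dummy_opcode,
                                          int32_t link, int dst_code,
                                          int sequence_length) {
      const uint32_t kImm26Mask = (1u << 26) - 1;
      const uint32_t kNop = 0x60000000u;  // PPC nop: ori r0, r0, 0
      out[0] = dummy_opcode | (static_cast<uint32_t>(link) & kImm26Mask);
      out[1] = static_cast<uint32_t>(dst_code);
      for (int i = 2; i < sequence_length; i++) out[i] = kNop;
    }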
1705 1764
1706 void Assembler::emit_label_addr(Label* label) { 1765 void Assembler::emit_label_addr(Label* label) {
1707 CheckBuffer(); 1766 CheckBuffer();
1708 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE); 1767 RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE);
1709 int position = link(label); 1768 int position = link(label);
1710 if (label->is_bound()) { 1769 if (label->is_bound()) {
1711 // Keep internal references relative until EmitRelocations. 1770 // Keep internal references relative until EmitRelocations.
(...skipping 661 matching lines...)
2373 // Number of branches to unbound label at this point is zero, so we can 2432 // Number of branches to unbound label at this point is zero, so we can
2374 // move next buffer check to maximum. 2433 // move next buffer check to maximum.
2375 next_buffer_check_ = 2434 next_buffer_check_ =
2376 pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize; 2435 pc_offset() + kMaxCondBranchReach - kMaxBlockTrampolineSectionSize;
2377 } 2436 }
2378 return; 2437 return;
2379 } 2438 }
2380 2439
2381 2440
2382 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) { 2441 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
2383 DCHECK(!FLAG_enable_ool_constant_pool); 2442 UNREACHABLE();
2384 return isolate->factory()->empty_constant_pool_array(); 2443 return isolate->factory()->empty_constant_pool_array();
2385 } 2444 }
2386 2445
2387 2446
2388 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { 2447 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
2389 DCHECK(!FLAG_enable_ool_constant_pool); 2448 UNREACHABLE();
2449 }
2450
2451
2452 ConstantPoolBuilder::ConstantPoolBuilder() : size_(0) { entries_.reserve(64); }
2453
2454
2455 void ConstantPoolBuilder::AddEntry(ConstantPoolEntry& entry, bool sharing_ok) {
2456 DCHECK(!IsEmitted());
2457
2458 if (sharing_ok) {
2459 // Try to merge entries
2460 size_t i;
2461 std::vector<ConstantPoolEntry>::const_iterator it;
2462 for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) {
2463 if (it->merged_index_ != -2 && entry.IsEqual(*it)) {
2464 // Merge with found entry.
2465 entry.merged_index_ = i;
2466 break;
2467 }
2468 }
2469 } else {
2470 // Ensure this entry remains unique
2471 entry.merged_index_ = -2;
2472 }
2473
2474 entries_.push_back(entry);
2475
2476 if (entry.merged_index_ < 0) {
2477 // Not merged, so update the appropriate count and size.
2478 number_of_entries_.increment(entry.type());
2479 size_ = number_of_entries_.size();
2480 }
2481 }
2482
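AddEntry overloads merged_index_ as a small state machine: -2 marks an entry that must stay unique, a negative initial value (assumed -1 here) marks an unmerged entry, and a value >= 0 points at the earlier entry this one was deduplicated against. A self-contained sketch of the merge pass, with a simplified entry type:

    #include <cstdint>
    #include <vector>

    struct Entry {
      intptr_t value;
      int merged_index;  // -1: unmerged (assumed initial); -2: no sharing
    };

    static void AddEntrySketch(std::vector<Entry>* entries, Entry entry,
                               bool sharing_ok) {
      if (sharing_ok) {
        for (size_t i = 0; i < entries->size(); i++) {
          const Entry& e = (*entries)[i];
          if (e.merged_index != -2 && e.value == entry.value) {
            entry.merged_index = static_cast<int>(i);  // share entry i's slot
            break;
          }
        }
      } else {
        entry.merged_index = -2;  // keep this entry unique
      }
      entries->push_back(entry);
      // Only entries with merged_index < 0 occupy space in the emitted pool.
    }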
2483
2484 void ConstantPoolBuilder::EmitGroup(Assembler* assm, int entrySize) {
2485 int base = label_.pos();
2486 for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
2487 entry != entries_.end(); entry++) {
2488 #if !V8_TARGET_ARCH_PPC64
2489 // Skip entries not in the requested group based on size.
2490 if (entry->size() != entrySize) continue;
2491 #endif
2492
2493 // Update constant pool if necessary and get the entry's offset.
2494 int offset;
2495 if (entry->merged_index_ < 0) {
2496 offset = assm->pc_offset() - base;
2497 entry->merged_index_ = offset; // Stash offset for merged entries.
2498 #if V8_TARGET_ARCH_PPC64
2499 assm->emit_ptr(entry->value_);
2500 #else
2501 if (entrySize == kDoubleSize) {
2502 assm->emit_double(entry->value64_);
2503 } else {
2504 assm->emit_ptr(entry->value_);
2505 }
2506 #endif
2507 } else {
2508 DCHECK(entry->merged_index_ < (entry - entries_.begin()));
2509 offset = entries_[entry->merged_index_].merged_index_;
2510 }
2511
2512 // Patch load instruction with correct offset.
2513 assm->SetConstantPoolOffset(entry->position_, offset);
2514 }
2515 }
2516
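EmitGroup reuses merged_index_ a second time: once an unmerged entry is emitted, the field is overwritten with the entry's own offset from the pool base, so a merged entry resolves its shared offset with a single indirection. A sketch of just that lookup, with a simplified entry type:

    #include <vector>

    struct PoolEntry { int merged_index; };  // same convention as above

    static int ResolveOffset(std::vector<PoolEntry>* entries, size_t i,
                             int offset_if_emitting) {
      PoolEntry* e = &(*entries)[i];
      if (e->merged_index < 0) {
        e->merged_index = offset_if_emitting;  // stash this entry's offset
        return offset_if_emitting;
      }
      // Merged: the earlier entry's merged_index now holds the offset.
      return (*entries)[e->merged_index].merged_index;
    }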
2517
2518 // Emit and return position of pool. Zero implies no constant pool.
2519 int ConstantPoolBuilder::Emit(Assembler* assm) {
2520 bool empty = IsEmpty();
2521 bool emitted = IsEmitted();
2522
2523 if (!emitted) {
2524 // Mark start of constant pool. Align if necessary.
2525 if (!empty) assm->CodeTargetAlign();
2526 assm->bind(&label_);
2527 }
2528
2529 int position = empty ? 0 : label_.pos();
2530
2531 if (!(emitted || empty)) {
2532 // Emit in groups based on size. We don't support 32-bit
2533 // constants in 64-bit mode so the only non-pointer-sized entries
2534 // are doubles in 32-bit mode.
2535 #if !V8_TARGET_ARCH_PPC64
2536 // Emit any doubles first for alignment purposes.
2537 if (number_of_entries_.count_of(INT64)) {
2538 EmitGroup(assm, kDoubleSize);
2539 }
2540 #endif
2541 EmitGroup(assm, kPointerSize);
2542 DCHECK(position > 0);
2543 DCHECK(assm->pc_offset() - position == size_);
2544 }
2545
2546 return position;
2390 } 2547 }
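The group ordering in Emit keeps 8-byte entries naturally aligned on 32-bit targets: after CodeTargetAlign (assumed here to align the pool base to at least 8 bytes), doubles are laid down first, then pointer-sized entries, so no padding is needed between groups. A layout sketch (counts hypothetical):

    // Pool layout, 32-bit mode, pool base 8-byte aligned:
    //   base + 0  : double  #0   (8 bytes)
    //   base + 8  : double  #1
    //   base + 16 : pointer #0   (4 bytes)
    //   base + 20 : pointer #1
    // Emitting pointers first could leave a double only 4-byte aligned
    // whenever the pointer group has odd length.
    static int PoolSize32(int num_doubles, int num_pointers) {
      return num_doubles * 8 + num_pointers * 4;
    }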
2391 } 2548 }
2392 } // namespace v8::internal 2549 } // namespace v8::internal
2393 2550
2394 #endif // V8_TARGET_ARCH_PPC 2551 #endif // V8_TARGET_ARCH_PPC
