Index: src/ppc/assembler-ppc.cc
diff --git a/src/ppc/assembler-ppc.cc b/src/ppc/assembler-ppc.cc
index 8889aa4b26da716f01f8be555c1a3e111123e94e..adf8cb82d1d40b872ef5c9d78f4e82c885e32b40 100644
--- a/src/ppc/assembler-ppc.cc
+++ b/src/ppc/assembler-ppc.cc
@@ -149,13 +149,18 @@ const int RelocInfo::kApplyMask = 1 << RelocInfo::INTERNAL_REFERENCE |
 bool RelocInfo::IsCodedSpecially() {
   // The deserializer needs to know whether a pointer is specially
   // coded. Being specially coded on PPC means that it is a lis/ori
-  // instruction sequence, and these are always the case inside code
-  // objects.
+  // instruction sequence or an out-of-line constant pool entry, and
+  // one of these is always the case inside code objects.
   return true;
 }
 
 
 bool RelocInfo::IsInConstantPool() {
+  if (FLAG_enable_embedded_constant_pool) {
+    Address constant_pool = host_->constant_pool();
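+    // A pc counts as "in" the pool if it points into the emitted pool
+    // data itself or at the load instruction that references a pool entry.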
+    return (constant_pool &&
+            (pc_ >= constant_pool || Assembler::IsConstantPoolLoadStart(pc_)));
+  }
   return false;
 }
@@ -202,11 +207,13 @@ MemOperand::MemOperand(Register ra, Register rb) {
 Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
     : AssemblerBase(isolate, buffer, buffer_size),
       recorded_ast_id_(TypeFeedbackId::None()),
+      constant_pool_builder_(),
       positions_recorder_(this) {
   reloc_info_writer.Reposition(buffer_ + buffer_size_, pc_);
   no_trampoline_pool_before_ = 0;
   trampoline_pool_blocked_nesting_ = 0;
+  constant_pool_entry_sharing_blocked_nesting_ = 0;
   // We leave space (kMaxBlockTrampolineSectionSize)
   // for BlockTrampolinePoolScope buffer.
   next_buffer_check_ =
@@ -222,6 +229,9 @@ Assembler::Assembler(Isolate* isolate, void* buffer, int buffer_size)
 
 
 void Assembler::GetCode(CodeDesc* desc) {
+  // Emit constant pool if necessary.
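+  // EmitConstantPool() returns the pc offset at which the pool begins,
+  // or zero when no pool was emitted.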
+  int offset = EmitConstantPool();
+
   EmitRelocations();
 
   // Set up code descriptor.
@@ -229,6 +239,7 @@ void Assembler::GetCode(CodeDesc* desc) {
   desc->buffer_size = buffer_size_;
   desc->instr_size = pc_offset();
   desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
+  desc->constant_pool_size = (offset ? desc->instr_size - offset : 0);
   desc->origin = this;
 }
@@ -472,7 +483,8 @@ void Assembler::target_at_put(int pos, int target_pos) {
       // Load the address of the label in a register.
       Register dst = Register::from_code(instr_at(pos + kInstrSize));
       CodePatcher patcher(reinterpret_cast<byte*>(buffer_ + pos),
-                          kMovInstructions, CodePatcher::DONT_FLUSH);
+                          kMovInstructionsNoConstantPool,
+                          CodePatcher::DONT_FLUSH);
       // Keep internal references relative until EmitRelocations.
       patcher.masm()->bitwise_mov(dst, target_pos);
       break;
@@ -1500,6 +1512,38 @@ void Assembler::function_descriptor() {
 }
 
 
+int Assembler::instructions_required_for_mov(const Operand& x) const {
+  bool canOptimize =
+      !(x.must_output_reloc_info(this) || is_trampoline_pool_blocked());
+  if (use_constant_pool_for_mov(x, canOptimize)) {
+    // Current usage guarantees that all constant pool references can
+    // use the same sequence.
+    return kMovInstructionsConstantPool;
+  }
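+  // Without the constant pool, an optimizable mov would have a
+  // value-dependent length, so the worst-case sequence must be required.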
+  DCHECK(!canOptimize);
+  return kMovInstructionsNoConstantPool;
+}
+
+
+bool Assembler::use_constant_pool_for_mov(const Operand& x,
+                                          bool canOptimize) const {
+  if (!FLAG_enable_embedded_constant_pool || !is_constant_pool_available() ||
+      is_constant_pool_full()) {
+    // If there is no constant pool available, we must use a mov
+    // immediate sequence.
+    return false;
+  }
+
+  intptr_t value = x.immediate();
+  if (canOptimize && is_int16(value)) {
+    // Prefer a single-instruction load-immediate.
+    return false;
+  }
+
+  return true;
+}
+
+
 void Assembler::EnsureSpaceFor(int space_needed) {
   if (buffer_space() <= (kGap + space_needed)) {
     GrowBuffer(space_needed);
@@ -1532,6 +1576,20 @@ void Assembler::mov(Register dst, const Operand& src) {
   canOptimize =
       !(relocatable || (is_trampoline_pool_blocked() && !is_int16(value)));
 
+  if (use_constant_pool_for_mov(src, canOptimize)) {
+    DCHECK(is_constant_pool_available());
+    if (relocatable) {
+      RecordRelocInfo(src.rmode_);
+    }
+    ConstantPoolAddEntry(src.rmode_, value);
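+    // Load through kConstantPoolRegister; the zero offset is a placeholder
+    // patched to the entry's real offset when the pool is emitted (see
+    // ConstantPoolBuilder::EmitGroup).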
+#if V8_TARGET_ARCH_PPC64
+    ld(dst, MemOperand(kConstantPoolRegister, 0));
+#else
+    lwz(dst, MemOperand(kConstantPoolRegister, 0));
+#endif
+    return;
+  }
+
   if (canOptimize) {
     if (is_int16(value)) {
       li(dst, Operand(value));
@@ -1673,6 +1731,7 @@ void Assembler::add_label_offset(Register dst, Register base, Label* label,
 }
 
 
+// TODO(mbrandy): allow loading internal reference from constant pool
 void Assembler::mov_label_addr(Register dst, Label* label) {
   CheckBuffer();
   RecordRelocInfo(RelocInfo::INTERNAL_REFERENCE_ENCODED);
@@ -1697,8 +1756,8 @@ void Assembler::mov_label_addr(Register dst, Label* label) {
     BlockTrampolinePoolScope block_trampoline_pool(this);
     emit(kUnboundMovLabelAddrOpcode | (link & kImm26Mask));
     emit(dst.code());
-    DCHECK(kMovInstructions >= 2);
-    for (int i = 0; i < kMovInstructions - 2; i++) nop();
+    DCHECK(kMovInstructionsNoConstantPool >= 2);
+    for (int i = 0; i < kMovInstructionsNoConstantPool - 2; i++) nop();
   }
 }
@@ -2380,13 +2439,111 @@ void Assembler::CheckTrampolinePool() {
 
 
 Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
-  DCHECK(!FLAG_enable_ool_constant_pool);
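+  // With embedded constant pools the pool is emitted into the code
+  // object's instruction stream, so this out-of-line ConstantPoolArray
+  // path should never be reached.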
+  UNREACHABLE();
   return isolate->factory()->empty_constant_pool_array();
 }
 
 
 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
-  DCHECK(!FLAG_enable_ool_constant_pool);
+  UNREACHABLE();
+}
+
+
+ConstantPoolBuilder::ConstantPoolBuilder() : size_(0) { entries_.reserve(64); }
+
+
+void ConstantPoolBuilder::AddEntry(ConstantPoolEntry& entry, bool sharing_ok) {
+  DCHECK(!IsEmitted());
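+  // merged_index_ tracks sharing: -2 marks an entry that must stay unique,
+  // and a value >= 0 is the index of the earlier entry this one merged with.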
+
+  if (sharing_ok) {
+    // Try to merge with an existing entry.
+    size_t i;
+    std::vector<ConstantPoolEntry>::const_iterator it;
+    for (it = entries_.begin(), i = 0; it != entries_.end(); it++, i++) {
+      if (it->merged_index_ != -2 && entry.IsEqual(*it)) {
+        // Merge with found entry.
+        entry.merged_index_ = i;
+        break;
+      }
+    }
+  } else {
+    // Ensure this entry remains unique.
+    entry.merged_index_ = -2;
+  }
+
+  entries_.push_back(entry);
+
+  if (entry.merged_index_ < 0) {
+    // Not merged, so update the appropriate count and size.
+    number_of_entries_.increment(entry.type());
+    size_ = number_of_entries_.size();
+  }
+}
+
+
+void ConstantPoolBuilder::EmitGroup(Assembler* assm, int entrySize) {
+  int base = label_.pos();
+  for (std::vector<ConstantPoolEntry>::iterator entry = entries_.begin();
+       entry != entries_.end(); entry++) {
+#if !V8_TARGET_ARCH_PPC64
+    // Skip entries not in the requested group based on size.
+    if (entry->size() != entrySize) continue;
+#endif
+
+    // Emit the entry's value if it was not merged, and determine its offset.
+    int offset;
+    if (entry->merged_index_ < 0) {
+      offset = assm->pc_offset() - base;
+      entry->merged_index_ = offset;  // Stash offset for merged entries.
+#if V8_TARGET_ARCH_PPC64
+      assm->emit_ptr(entry->value_);
+#else
+      if (entrySize == kDoubleSize) {
+        assm->emit_double(entry->value64_);
+      } else {
+        assm->emit_ptr(entry->value_);
+      }
+#endif
+    } else {
+      DCHECK(entry->merged_index_ < (entry - entries_.begin()));
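+      // A merged entry loads from its representative; reuse the offset
+      // stashed on that entry above.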
+      offset = entries_[entry->merged_index_].merged_index_;
+    }
+
+    // Patch the load instruction with the correct offset.
+    assm->SetConstantPoolOffset(entry->position_, offset);
+  }
+}
+
+
+// Emit and return the position of the pool. Zero implies no constant pool.
+int ConstantPoolBuilder::Emit(Assembler* assm) {
+  bool empty = IsEmpty();
+  bool emitted = IsEmitted();
+
+  if (!emitted) {
+    // Mark the start of the constant pool. Align if necessary.
+    if (!empty) assm->CodeTargetAlign();
+    assm->bind(&label_);
+  }
+
+  int position = empty ? 0 : label_.pos();
+
+  if (!(emitted || empty)) {
+    // Emit in groups based on size. We don't support 32-bit constants
+    // in 64-bit mode, so the only non-pointer-sized entries are doubles
+    // in 32-bit mode.
+#if !V8_TARGET_ARCH_PPC64
+    // Emit any doubles first for alignment purposes.
+    if (number_of_entries_.count_of(INT64)) {
+      EmitGroup(assm, kDoubleSize);
+    }
+#endif
+    EmitGroup(assm, kPointerSize);
+    DCHECK(position > 0);
+    DCHECK(assm->pc_offset() - position == size_);
+  }
+
+  return position;
 }
 }
 }  // namespace v8::internal