| Index: src/arm/assembler-arm.cc
|
| diff --git a/src/arm/assembler-arm.cc b/src/arm/assembler-arm.cc
|
| index 6cea58184c9acdcaef4dab3add3baf0f0ea3bf03..22586c558bce34cb37dee56de01538f26ce375f4 100644
|
| --- a/src/arm/assembler-arm.cc
|
| +++ b/src/arm/assembler-arm.cc
|
| @@ -344,6 +344,7 @@ Operand::Operand(Handle<Object> handle) {
|
| ASSERT(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
|
| imm32_ = reinterpret_cast<intptr_t>(handle.location());
|
| rmode_ = RelocInfo::EMBEDDED_OBJECT;
|
| + weak_in_optimized_code_ = Code::IsWeakObjectInOptimizedCode(obj);
|
| } else {
|
| // no relocation needed
|
| imm32_ = reinterpret_cast<intptr_t>(obj);
|
| @@ -1138,7 +1139,9 @@ bool Operand::is_single_instruction(const Assembler* assembler,
|
| void Assembler::move_32_bit_immediate(Register rd,
|
| const Operand& x,
|
| Condition cond) {
|
| - RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL);
|
| + RelocInfo::WeakMode weak_mode = x.weak_in_optimized_code_ ?
|
| + RelocInfo::WEAK_IN_OPTIMIZED_CODE : RelocInfo::NOT_WEAK;
|
| + RelocInfo rinfo(pc_, x.rmode_, weak_mode, x.imm32_);
|
| if (x.must_output_reloc_info(this)) {
|
| RecordRelocInfo(rinfo);
|
| }
|
| @@ -3536,9 +3539,10 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
|
| }
|
|
|
|
|
| -MaybeObject* Assembler::AllocateConstantPool(Heap* heap) {
|
| +MaybeObject* Assembler::AllocateConstantPool(Heap* heap,
|
| + bool has_weak_pointers) {
|
| ASSERT(FLAG_enable_ool_constant_pool);
|
| - return constant_pool_builder_.Allocate(heap);
|
| + return constant_pool_builder_.Allocate(heap, has_weak_pointers);
|
| }
|
|
|
|
|
| @@ -3554,6 +3558,7 @@ ConstantPoolBuilder::ConstantPoolBuilder()
|
| count_of_64bit_(0),
|
| count_of_code_ptr_(0),
|
| count_of_heap_ptr_(0),
|
| + count_of_weak_ptr_(0),
|
| count_of_32bit_(0) { }
|
|
|
|
|
| @@ -3618,17 +3623,21 @@ void ConstantPoolBuilder::AddEntry(Assembler* assm,
|
| count_of_code_ptr_++;
|
| } else {
|
| ASSERT(IsHeapPtrEntry(rmode));
|
| - count_of_heap_ptr_++;
|
| + if (rinfo.weak_mode() == RelocInfo::WEAK_IN_OPTIMIZED_CODE) {
|
| + count_of_weak_ptr_++;
|
| + } else {
|
| + count_of_heap_ptr_++;
|
| + }
|
| }
|
| }
|
|
|
| // Check if we still have room for another entry given Arm's ldr and vldr
|
| // immediate offset range.
|
| - if (!(is_uint12(ConstantPoolArray::SizeFor(count_of_64bit_,
|
| - count_of_code_ptr_,
|
| - count_of_heap_ptr_,
|
| - count_of_32bit_))) &&
|
| - is_uint10(ConstantPoolArray::SizeFor(count_of_64bit_, 0, 0, 0))) {
|
| + if (!(is_uint12(ConstantPoolArray::SizeFor(
|
| + count_of_64bit_,
|
| +      count_of_code_ptr_, count_of_heap_ptr_, count_of_weak_ptr_,

| +      count_of_32bit_))) &&

| +      is_uint10(ConstantPoolArray::SizeFor(count_of_64bit_, 0, 0, 0, 0))) {
|
| assm->set_constant_pool_full();
|
| }
|
| }
|
| @@ -3643,12 +3652,17 @@ void ConstantPoolBuilder::Relocate(int pc_delta) {
|
| }
|
|
|
|
|
| -MaybeObject* ConstantPoolBuilder::Allocate(Heap* heap) {
|
| +MaybeObject* ConstantPoolBuilder::Allocate(Heap* heap, bool has_weak_pointers) {
|
| if (IsEmpty()) {
|
| return heap->empty_constant_pool_array();
|
| } else {
|
| + if (!has_weak_pointers) {
|
| + // Move potential weak pointers to normal heap pointers.
|
| + count_of_heap_ptr_ += count_of_weak_ptr_;
|
| + count_of_weak_ptr_ = 0;
|
| + }
|
| return heap->AllocateConstantPoolArray(count_of_64bit_, count_of_code_ptr_,
|
| - count_of_heap_ptr_, count_of_32bit_);
|
| + count_of_heap_ptr_, count_of_weak_ptr_, count_of_32bit_);
|
| }
|
| }
|
|
|
| @@ -3658,13 +3672,17 @@ void ConstantPoolBuilder::Populate(Assembler* assm,
|
| ASSERT(constant_pool->count_of_int64_entries() == count_of_64bit_);
|
| ASSERT(constant_pool->count_of_code_ptr_entries() == count_of_code_ptr_);
|
| ASSERT(constant_pool->count_of_heap_ptr_entries() == count_of_heap_ptr_);
|
| + ASSERT(constant_pool->count_of_weak_ptr_entries() == count_of_weak_ptr_);
|
| ASSERT(constant_pool->count_of_int32_entries() == count_of_32bit_);
|
| ASSERT(entries_.size() == merged_indexes_.size());
|
|
|
| int index_64bit = 0;
|
| int index_code_ptr = count_of_64bit_;
|
| int index_heap_ptr = count_of_64bit_ + count_of_code_ptr_;
|
| - int index_32bit = count_of_64bit_ + count_of_code_ptr_ + count_of_heap_ptr_;
|
| + int index_weak_ptr = count_of_64bit_ + count_of_code_ptr_ +
|
| + count_of_heap_ptr_;
|
| + int index_32bit = count_of_64bit_ + count_of_code_ptr_ + count_of_heap_ptr_ +
|
| + count_of_weak_ptr_;
|
|
|
| size_t i;
|
| std::vector<RelocInfo>::const_iterator rinfo;
|
| @@ -3687,10 +3705,18 @@ void ConstantPoolBuilder::Populate(Assembler* assm,
|
| reinterpret_cast<Object *>(rinfo->data()));
|
| } else {
|
| ASSERT(IsHeapPtrEntry(rmode));
|
| - offset = constant_pool->OffsetOfElementAt(index_heap_ptr) -
|
| - kHeapObjectTag;
|
| - constant_pool->set(index_heap_ptr++,
|
| - reinterpret_cast<Object *>(rinfo->data()));
|
| + if (count_of_weak_ptr_ > 0 &&
|
| + rinfo->weak_mode() == RelocInfo::WEAK_IN_OPTIMIZED_CODE) {
|
| + offset = constant_pool->OffsetOfElementAt(index_weak_ptr) -
|
| + kHeapObjectTag;
|
| + constant_pool->set(index_weak_ptr++,
|
| + reinterpret_cast<Object *>(rinfo->data()));
|
| + } else {
|
| + offset = constant_pool->OffsetOfElementAt(index_heap_ptr) -
|
| + kHeapObjectTag;
|
| + constant_pool->set(index_heap_ptr++,
|
| + reinterpret_cast<Object *>(rinfo->data()));
|
| + }
|
| }
|
| merged_indexes_[i] = offset; // Stash offset for merged entries.
|
| } else {
|
| @@ -3721,7 +3747,8 @@ void ConstantPoolBuilder::Populate(Assembler* assm,
|
| ASSERT((index_64bit == count_of_64bit_) &&
|
| (index_code_ptr == (index_64bit + count_of_code_ptr_)) &&
|
| (index_heap_ptr == (index_code_ptr + count_of_heap_ptr_)) &&
|
| - (index_32bit == (index_heap_ptr + count_of_32bit_)));
|
| + (index_weak_ptr == (index_heap_ptr + count_of_weak_ptr_)) &&
|
| + (index_32bit == (index_weak_ptr + count_of_32bit_)));
|
| }
|
|
|
|
|
|
|