Chromium Code Reviews

Side by Side Diff: src/arm/assembler-arm.cc

Issue 203583007: Support weak heap references in the ConstantPool for IsWeakObjectInOptimizedCode objects. (Closed) Base URL: https://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 9 months ago
1 // Copyright (c) 1994-2006 Sun Microsystems Inc. 1 // Copyright (c) 1994-2006 Sun Microsystems Inc.
2 // All Rights Reserved. 2 // All Rights Reserved.
3 // 3 //
4 // Redistribution and use in source and binary forms, with or without 4 // Redistribution and use in source and binary forms, with or without
5 // modification, are permitted provided that the following conditions 5 // modification, are permitted provided that the following conditions
6 // are met: 6 // are met:
7 // 7 //
8 // - Redistributions of source code must retain the above copyright notice, 8 // - Redistributions of source code must retain the above copyright notice,
9 // this list of conditions and the following disclaimer. 9 // this list of conditions and the following disclaimer.
10 // 10 //
(...skipping 326 matching lines...)
337 337
338 Operand::Operand(Handle<Object> handle) { 338 Operand::Operand(Handle<Object> handle) {
339 AllowDeferredHandleDereference using_raw_address; 339 AllowDeferredHandleDereference using_raw_address;
340 rm_ = no_reg; 340 rm_ = no_reg;
341 // Verify all Objects referred by code are NOT in new space. 341 // Verify all Objects referred by code are NOT in new space.
342 Object* obj = *handle; 342 Object* obj = *handle;
343 if (obj->IsHeapObject()) { 343 if (obj->IsHeapObject()) {
344 ASSERT(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj)); 344 ASSERT(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
345 imm32_ = reinterpret_cast<intptr_t>(handle.location()); 345 imm32_ = reinterpret_cast<intptr_t>(handle.location());
346 rmode_ = RelocInfo::EMBEDDED_OBJECT; 346 rmode_ = RelocInfo::EMBEDDED_OBJECT;
347 weak_in_optimized_code_ = Code::IsWeakObjectInOptimizedCode(obj);
347 } else { 348 } else {
348 // no relocation needed 349 // no relocation needed
349 imm32_ = reinterpret_cast<intptr_t>(obj); 350 imm32_ = reinterpret_cast<intptr_t>(obj);
350 rmode_ = RelocInfo::NONE32; 351 rmode_ = RelocInfo::NONE32;
351 } 352 }
352 } 353 }
353 354
354 355
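The chunk above is the producer side of the change: when an Operand embeds a heap object, the constructor now also records whether that object qualifies as weak in optimized code. A minimal standalone sketch of the decision, using hypothetical stand-in types rather than the real V8 classes:

#include <cstdint>

// RelocMode and OperandSketch are illustrative stand-ins for
// RelocInfo::Mode and Operand in the patch.
enum class RelocMode { kEmbeddedObject, kNone32 };

struct OperandSketch {
  intptr_t imm32 = 0;
  RelocMode rmode = RelocMode::kNone32;
  bool weak_in_optimized_code = false;
};

// is_heap_object and is_weak stand in for obj->IsHeapObject() and
// Code::IsWeakObjectInOptimizedCode(obj).
OperandSketch MakeOperand(intptr_t handle_location, intptr_t raw_value,
                          bool is_heap_object, bool is_weak) {
  OperandSketch op;
  if (is_heap_object) {
    op.imm32 = handle_location;             // refer to the handle slot
    op.rmode = RelocMode::kEmbeddedObject;  // relocation required
    op.weak_in_optimized_code = is_weak;    // new: remember weakness
  } else {
    op.imm32 = raw_value;                   // no relocation needed
    op.rmode = RelocMode::kNone32;
  }
  return op;
}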
355 Operand::Operand(Register rm, ShiftOp shift_op, int shift_imm) { 356 Operand::Operand(Register rm, ShiftOp shift_op, int shift_imm) {
356 ASSERT(is_uint5(shift_imm)); 357 ASSERT(is_uint5(shift_imm));
(...skipping 774 matching lines...)
1131 // No use of constant pool and the immediate operand can be encoded as a 1132 // No use of constant pool and the immediate operand can be encoded as a
1132 // shifter operand. 1133 // shifter operand.
1133 return true; 1134 return true;
1134 } 1135 }
1135 } 1136 }
1136 1137
1137 1138
1138 void Assembler::move_32_bit_immediate(Register rd, 1139 void Assembler::move_32_bit_immediate(Register rd,
1139 const Operand& x, 1140 const Operand& x,
1140 Condition cond) { 1141 Condition cond) {
1141 RelocInfo rinfo(pc_, x.rmode_, x.imm32_, NULL); 1142 RelocInfo::WeakMode weak_mode = x.weak_in_optimized_code_ ?
1143 RelocInfo::WEAK_IN_OPTIMIZED_CODE : RelocInfo::NOT_WEAK;
1144 RelocInfo rinfo(pc_, x.rmode_, weak_mode, x.imm32_);
1142 if (x.must_output_reloc_info(this)) { 1145 if (x.must_output_reloc_info(this)) {
1143 RecordRelocInfo(rinfo); 1146 RecordRelocInfo(rinfo);
1144 } 1147 }
1145 1148
1146 if (use_mov_immediate_load(x, this)) { 1149 if (use_mov_immediate_load(x, this)) {
1147 Register target = rd.code() == pc.code() ? ip : rd; 1150 Register target = rd.code() == pc.code() ? ip : rd;
1148 // TODO(rmcilroy): add ARMv6 support for immediate loads. 1151 // TODO(rmcilroy): add ARMv6 support for immediate loads.
1149 ASSERT(CpuFeatures::IsSupported(ARMv7)); 1152 ASSERT(CpuFeatures::IsSupported(ARMv7));
1150 if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) { 1153 if (!FLAG_enable_ool_constant_pool && x.must_output_reloc_info(this)) {
1151 // Make sure the movw/movt doesn't get separated. 1154 // Make sure the movw/movt doesn't get separated.
(...skipping 2377 matching lines...)
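In move_32_bit_immediate above, the flag crosses from the Operand into relocation info: the boolean becomes a two-valued WeakMode passed to the RelocInfo constructor, so later stages only need the RelocInfo. A sketch of just that mapping, reusing RelocMode and OperandSketch from the sketch above (the WeakMode names follow the patch; the constructor shape is simplified):

enum class WeakMode { kNotWeak, kWeakInOptimizedCode };

// Simplified shape of the new RelocInfo(pc, rmode, weak_mode, data)
// constructor introduced by the patch.
struct RelocInfoSketch {
  intptr_t pc;
  RelocMode rmode;
  WeakMode weak_mode;
  intptr_t data;
};

RelocInfoSketch MakeRelocInfo(intptr_t pc, const OperandSketch& x) {
  WeakMode weak_mode = x.weak_in_optimized_code
                           ? WeakMode::kWeakInOptimizedCode
                           : WeakMode::kNotWeak;
  return RelocInfoSketch{pc, x.rmode, weak_mode, x.imm32};
}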
3529 bind(&after_pool); 3532 bind(&after_pool);
3530 } 3533 }
3531 } 3534 }
3532 3535
3533 // Since a constant pool was just emitted, move the check offset forward by 3536 // Since a constant pool was just emitted, move the check offset forward by
3534 // the standard interval. 3537 // the standard interval.
3535 next_buffer_check_ = pc_offset() + kCheckPoolInterval; 3538 next_buffer_check_ = pc_offset() + kCheckPoolInterval;
3536 } 3539 }
3537 3540
3538 3541
3539 MaybeObject* Assembler::AllocateConstantPool(Heap* heap) { 3542 MaybeObject* Assembler::AllocateConstantPool(Heap* heap,
3543 bool has_weak_pointers) {
3540 ASSERT(FLAG_enable_ool_constant_pool); 3544 ASSERT(FLAG_enable_ool_constant_pool);
3541 return constant_pool_builder_.Allocate(heap); 3545 return constant_pool_builder_.Allocate(heap, has_weak_pointers);
3542 } 3546 }
3543 3547
3544 3548
3545 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) { 3549 void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
3546 ASSERT(FLAG_enable_ool_constant_pool); 3550 ASSERT(FLAG_enable_ool_constant_pool);
3547 constant_pool_builder_.Populate(this, constant_pool); 3551 constant_pool_builder_.Populate(this, constant_pool);
3548 } 3552 }
3549 3553
3550 3554
3551 ConstantPoolBuilder::ConstantPoolBuilder() 3555 ConstantPoolBuilder::ConstantPoolBuilder()
3552 : entries_(), 3556 : entries_(),
3553 merged_indexes_(), 3557 merged_indexes_(),
3554 count_of_64bit_(0), 3558 count_of_64bit_(0),
3555 count_of_code_ptr_(0), 3559 count_of_code_ptr_(0),
3556 count_of_heap_ptr_(0), 3560 count_of_heap_ptr_(0),
3561 count_of_weak_ptr_(0),
3557 count_of_32bit_(0) { } 3562 count_of_32bit_(0) { }
3558 3563
3559 3564
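The builder now tracks five entry classes; the new weak counter splits what used to be a single heap-pointer count. Illustrative bookkeeping (field names loosely follow the patch):

struct PoolCounts {
  int count_64bit = 0;     // NONE64 entries, loaded with vldr
  int count_code_ptr = 0;  // code-target pointers
  int count_heap_ptr = 0;  // ordinary (strong) heap pointers
  int count_weak_ptr = 0;  // new: heap pointers weak in optimized code
  int count_32bit = 0;     // NONE32 entries
};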
3560 bool ConstantPoolBuilder::IsEmpty() { 3565 bool ConstantPoolBuilder::IsEmpty() {
3561 return entries_.size() == 0; 3566 return entries_.size() == 0;
3562 } 3567 }
3563 3568
3564 3569
3565 bool ConstantPoolBuilder::Is64BitEntry(RelocInfo::Mode rmode) { 3570 bool ConstantPoolBuilder::Is64BitEntry(RelocInfo::Mode rmode) {
3566 return rmode == RelocInfo::NONE64; 3571 return rmode == RelocInfo::NONE64;
(...skipping 44 matching lines...)
3611 if (merged_index == -1) { 3616 if (merged_index == -1) {
3612 // Not merged, so update the appropriate count. 3617 // Not merged, so update the appropriate count.
3613 if (Is64BitEntry(rmode)) { 3618 if (Is64BitEntry(rmode)) {
3614 count_of_64bit_++; 3619 count_of_64bit_++;
3615 } else if (Is32BitEntry(rmode)) { 3620 } else if (Is32BitEntry(rmode)) {
3616 count_of_32bit_++; 3621 count_of_32bit_++;
3617 } else if (IsCodePtrEntry(rmode)) { 3622 } else if (IsCodePtrEntry(rmode)) {
3618 count_of_code_ptr_++; 3623 count_of_code_ptr_++;
3619 } else { 3624 } else {
3620 ASSERT(IsHeapPtrEntry(rmode)); 3625 ASSERT(IsHeapPtrEntry(rmode));
3621 count_of_heap_ptr_++; 3626 if (rinfo.weak_mode() == RelocInfo::WEAK_IN_OPTIMIZED_CODE) {
3627 count_of_weak_ptr_++;
3628 } else {
3629 count_of_heap_ptr_++;
3630 }
3622 } 3631 }
3623 } 3632 }
3624 3633
3625 // Check if we still have room for another entry given Arm's ldr and vldr 3634 // Check if we still have room for another entry given Arm's ldr and vldr
3626 // immediate offset range. 3635 // immediate offset range.
3627 if (!(is_uint12(ConstantPoolArray::SizeFor(count_of_64bit_, 3636 if (!(is_uint12(ConstantPoolArray::SizeFor(
3628 count_of_code_ptr_, 3637 count_of_64bit_,
3629 count_of_heap_ptr_, 3638 count_of_code_ptr_ + count_of_heap_ptr_ + count_of_weak_ptr_,
3630 count_of_32bit_))) && 3639 count_of_32bit_))) &&
3631 is_uint10(ConstantPoolArray::SizeFor(count_of_64bit_, 0, 0, 0))) { 3640 is_uint10(ConstantPoolArray::SizeFor(count_of_64bit_, 0, 0))) {
3632 assm->set_constant_pool_full(); 3641 assm->set_constant_pool_full();
3633 } 3642 }
3634 } 3643 }
3635 3644
3636 3645
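In AddEntry above, a weak-eligible heap pointer now bumps count_of_weak_ptr_ instead of count_of_heap_ptr_, and the size check sums all three pointer classes because they are all loaded with ldr (12-bit immediate offset), while 64-bit entries use vldr (10-bit scaled byte offset). A sketch of the check using the PoolCounts above and a hypothetical SizeFor; the condition mirrors the patch as written:

#include <cstdint>

bool IsUintN(int64_t value, int n) {
  return 0 <= value && value < (int64_t{1} << n);
}

// Stand-in for ConstantPoolArray::SizeFor: byte size of a pool with the
// given entry counts (header ignored for brevity).
int SizeFor(int n64, int nptr, int n32) {
  return n64 * 8 + (nptr + n32) * 4;
}

// Marked full when the whole pool no longer fits an ldr offset while the
// 64-bit prefix still fits a vldr offset (condition copied from the patch).
bool ShouldMarkFull(const PoolCounts& c) {
  int ptrs = c.count_code_ptr + c.count_heap_ptr + c.count_weak_ptr;
  return !IsUintN(SizeFor(c.count_64bit, ptrs, c.count_32bit), 12) &&
         IsUintN(SizeFor(c.count_64bit, 0, 0), 10);
}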
3637 void ConstantPoolBuilder::Relocate(int pc_delta) { 3646 void ConstantPoolBuilder::Relocate(int pc_delta) {
3638 for (std::vector<RelocInfo>::iterator rinfo = entries_.begin(); 3647 for (std::vector<RelocInfo>::iterator rinfo = entries_.begin();
3639 rinfo != entries_.end(); rinfo++) { 3648 rinfo != entries_.end(); rinfo++) {
3640 ASSERT(rinfo->rmode() != RelocInfo::JS_RETURN); 3649 ASSERT(rinfo->rmode() != RelocInfo::JS_RETURN);
3641 rinfo->set_pc(rinfo->pc() + pc_delta); 3650 rinfo->set_pc(rinfo->pc() + pc_delta);
3642 } 3651 }
3643 } 3652 }
3644 3653
3645 3654
3646 MaybeObject* ConstantPoolBuilder::Allocate(Heap* heap) { 3655 MaybeObject* ConstantPoolBuilder::Allocate(Heap* heap, bool has_weak_pointers) {
3647 if (IsEmpty()) { 3656 if (IsEmpty()) {
3648 return heap->empty_constant_pool_array(); 3657 return heap->empty_constant_pool_array();
3649 } else { 3658 } else {
3659 if (!has_weak_pointers) {
3660 // Move potential weak pointers to normal heap pointers.
3661 count_of_heap_ptr_ += count_of_weak_ptr_;
3662 count_of_weak_ptr_ = 0;
3663 }
3650 return heap->AllocateConstantPoolArray(count_of_64bit_, count_of_code_ptr_, 3664 return heap->AllocateConstantPoolArray(count_of_64bit_, count_of_code_ptr_,
3651 count_of_heap_ptr_, count_of_32bit_); 3665 count_of_heap_ptr_, count_of_weak_ptr_, count_of_32bit_);
3652 } 3666 }
3653 } 3667 }
3654 3668
3655 3669
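Allocate's new flag lets a caller that is not emitting optimized code opt out of the weak section: the weak count is folded back into the strong heap-pointer count before the array is sized. A sketch, again on PoolCounts:

// Demote weak-eligible entries to ordinary heap pointers when the
// generated code cannot treat its pool entries as weak.
void FoldWeakIntoHeap(PoolCounts* c, bool has_weak_pointers) {
  if (!has_weak_pointers) {
    c->count_heap_ptr += c->count_weak_ptr;
    c->count_weak_ptr = 0;
  }
}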
3656 void ConstantPoolBuilder::Populate(Assembler* assm, 3670 void ConstantPoolBuilder::Populate(Assembler* assm,
3657 ConstantPoolArray* constant_pool) { 3671 ConstantPoolArray* constant_pool) {
3658 ASSERT(constant_pool->count_of_int64_entries() == count_of_64bit_); 3672 ASSERT(constant_pool->count_of_int64_entries() == count_of_64bit_);
3659 ASSERT(constant_pool->count_of_code_ptr_entries() == count_of_code_ptr_); 3673 ASSERT(constant_pool->count_of_code_ptr_entries() == count_of_code_ptr_);
3660 ASSERT(constant_pool->count_of_heap_ptr_entries() == count_of_heap_ptr_); 3674 ASSERT(constant_pool->count_of_heap_ptr_entries() == count_of_heap_ptr_);
3675 ASSERT(constant_pool->count_of_weak_ptr_entries() == count_of_weak_ptr_);
3661 ASSERT(constant_pool->count_of_int32_entries() == count_of_32bit_); 3676 ASSERT(constant_pool->count_of_int32_entries() == count_of_32bit_);
3662 ASSERT(entries_.size() == merged_indexes_.size()); 3677 ASSERT(entries_.size() == merged_indexes_.size());
3663 3678
3664 int index_64bit = 0; 3679 int index_64bit = 0;
3665 int index_code_ptr = count_of_64bit_; 3680 int index_code_ptr = count_of_64bit_;
3666 int index_heap_ptr = count_of_64bit_ + count_of_code_ptr_; 3681 int index_heap_ptr = count_of_64bit_ + count_of_code_ptr_;
3667 int index_32bit = count_of_64bit_ + count_of_code_ptr_ + count_of_heap_ptr_; 3682 int index_weak_ptr = count_of_64bit_ + count_of_code_ptr_ +
3683 count_of_heap_ptr_;
3684 int index_32bit = count_of_64bit_ + count_of_code_ptr_ + count_of_heap_ptr_ +
3685 count_of_weak_ptr_;
3668 3686
3669 size_t i; 3687 size_t i;
3670 std::vector<RelocInfo>::const_iterator rinfo; 3688 std::vector<RelocInfo>::const_iterator rinfo;
3671 for (rinfo = entries_.begin(), i = 0; rinfo != entries_.end(); rinfo++, i++) { 3689 for (rinfo = entries_.begin(), i = 0; rinfo != entries_.end(); rinfo++, i++) {
3672 RelocInfo::Mode rmode = rinfo->rmode(); 3690 RelocInfo::Mode rmode = rinfo->rmode();
3673 3691
3674 // Update constant pool if necessary and get the entry's offset. 3692 // Update constant pool if necessary and get the entry's offset.
3675 int offset; 3693 int offset;
3676 if (merged_indexes_[i] == -1) { 3694 if (merged_indexes_[i] == -1) {
3677 if (Is64BitEntry(rmode)) { 3695 if (Is64BitEntry(rmode)) {
3678 offset = constant_pool->OffsetOfElementAt(index_64bit) - kHeapObjectTag; 3696 offset = constant_pool->OffsetOfElementAt(index_64bit) - kHeapObjectTag;
3679 constant_pool->set(index_64bit++, rinfo->data64()); 3697 constant_pool->set(index_64bit++, rinfo->data64());
3680 } else if (Is32BitEntry(rmode)) { 3698 } else if (Is32BitEntry(rmode)) {
3681 offset = constant_pool->OffsetOfElementAt(index_32bit) - kHeapObjectTag; 3699 offset = constant_pool->OffsetOfElementAt(index_32bit) - kHeapObjectTag;
3682 constant_pool->set(index_32bit++, static_cast<int32_t>(rinfo->data())); 3700 constant_pool->set(index_32bit++, static_cast<int32_t>(rinfo->data()));
3683 } else if (IsCodePtrEntry(rmode)) { 3701 } else if (IsCodePtrEntry(rmode)) {
3684 offset = constant_pool->OffsetOfElementAt(index_code_ptr) - 3702 offset = constant_pool->OffsetOfElementAt(index_code_ptr) -
3685 kHeapObjectTag; 3703 kHeapObjectTag;
3686 constant_pool->set(index_code_ptr++, 3704 constant_pool->set(index_code_ptr++,
3687 reinterpret_cast<Object *>(rinfo->data())); 3705 reinterpret_cast<Object *>(rinfo->data()));
3688 } else { 3706 } else {
3689 ASSERT(IsHeapPtrEntry(rmode)); 3707 ASSERT(IsHeapPtrEntry(rmode));
3690 offset = constant_pool->OffsetOfElementAt(index_heap_ptr) - 3708 if (count_of_weak_ptr_ > 0 &&
3691 kHeapObjectTag; 3709 rinfo->weak_mode() == RelocInfo::WEAK_IN_OPTIMIZED_CODE) {
3692 constant_pool->set(index_heap_ptr++, 3710 offset = constant_pool->OffsetOfElementAt(index_weak_ptr) -
3693 reinterpret_cast<Object *>(rinfo->data())); 3711 kHeapObjectTag;
3712 constant_pool->set(index_weak_ptr++,
3713 reinterpret_cast<Object *>(rinfo->data()));
3714 } else {
3715 offset = constant_pool->OffsetOfElementAt(index_heap_ptr) -
3716 kHeapObjectTag;
3717 constant_pool->set(index_heap_ptr++,
3718 reinterpret_cast<Object *>(rinfo->data()));
3719 }
3694 } 3720 }
3695 merged_indexes_[i] = offset; // Stash offset for merged entries. 3721 merged_indexes_[i] = offset; // Stash offset for merged entries.
3696 } else { 3722 } else {
3697 size_t merged_index = static_cast<size_t>(merged_indexes_[i]); 3723 size_t merged_index = static_cast<size_t>(merged_indexes_[i]);
3698 ASSERT(merged_index < merged_indexes_.size() && merged_index < i); 3724 ASSERT(merged_index < merged_indexes_.size() && merged_index < i);
3699 offset = merged_indexes_[merged_index]; 3725 offset = merged_indexes_[merged_index];
3700 } 3726 }
3701 3727
3702 // Patch vldr/ldr instruction with correct offset. 3728 // Patch vldr/ldr instruction with correct offset.
3703 Instr instr = assm->instr_at(rinfo->pc()); 3729 Instr instr = assm->instr_at(rinfo->pc());
(...skipping 10 matching lines...)
3714 Assembler::GetLdrRegisterImmediateOffset(instr) == 0)); 3740 Assembler::GetLdrRegisterImmediateOffset(instr) == 0));
3715 ASSERT(is_uint12(offset)); 3741 ASSERT(is_uint12(offset));
3716 assm->instr_at_put(rinfo->pc(), 3742 assm->instr_at_put(rinfo->pc(),
3717 Assembler::SetLdrRegisterImmediateOffset(instr, offset)); 3743 Assembler::SetLdrRegisterImmediateOffset(instr, offset));
3718 } 3744 }
3719 } 3745 }
3720 3746
3721 ASSERT((index_64bit == count_of_64bit_) && 3747 ASSERT((index_64bit == count_of_64bit_) &&
3722 (index_code_ptr == (index_64bit + count_of_code_ptr_)) && 3748 (index_code_ptr == (index_64bit + count_of_code_ptr_)) &&
3723 (index_heap_ptr == (index_code_ptr + count_of_heap_ptr_)) && 3749 (index_heap_ptr == (index_code_ptr + count_of_heap_ptr_)) &&
3724 (index_32bit == (index_heap_ptr + count_of_32bit_))); 3750 (index_weak_ptr == (index_heap_ptr + count_of_weak_ptr_)) &&
3751 (index_32bit == (index_weak_ptr + count_of_32bit_)));
3725 } 3752 }
3726 3753
3727 3754
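Populate lays the sections out contiguously, so each section's starting entry index is the running sum of the preceding counts, with the new weak section between the strong heap pointers and the 32-bit entries; the closing ASSERT block checks exactly this invariant. A sketch of the index computation on PoolCounts:

// Entry layout: [ 64-bit | code ptr | heap ptr | weak ptr | 32-bit ]
struct SectionStarts {
  int i64, code_ptr, heap_ptr, weak_ptr, i32;
};

SectionStarts StartIndexes(const PoolCounts& c) {
  SectionStarts s;
  s.i64 = 0;
  s.code_ptr = c.count_64bit;
  s.heap_ptr = s.code_ptr + c.count_code_ptr;
  s.weak_ptr = s.heap_ptr + c.count_heap_ptr;
  s.i32 = s.weak_ptr + c.count_weak_ptr;
  return s;
}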
3728 } } // namespace v8::internal 3755 } } // namespace v8::internal
3729 3756
3730 #endif // V8_TARGET_ARCH_ARM 3757 #endif // V8_TARGET_ARCH_ARM