| OLD | NEW |
| 1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 26 matching lines...) |
| 37 namespace internal { | 37 namespace internal { |
| 38 | 38 |
| 39 | 39 |
| 40 void RelocInfo::apply(intptr_t delta) { | 40 void RelocInfo::apply(intptr_t delta) { |
| 41 UNIMPLEMENTED(); | 41 UNIMPLEMENTED(); |
| 42 } | 42 } |
| 43 | 43 |
| 44 | 44 |
| 45 void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) { | 45 void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) { |
| 46 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)); | 46 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)); |
| 47 Assembler::set_target_address_at(pc_, target); | 47 Assembler::set_target_address_at(pc_, host_, target); |
| 48 if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) { | 48 if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) { |
| 49 Object* target_code = Code::GetCodeFromTargetAddress(target); | 49 Object* target_code = Code::GetCodeFromTargetAddress(target); |
| 50 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( | 50 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( |
| 51 host(), this, HeapObject::cast(target_code)); | 51 host(), this, HeapObject::cast(target_code)); |
| 52 } | 52 } |
| 53 } | 53 } |
| 54 | 54 |
| 55 | 55 |
| 56 inline unsigned CPURegister::code() const { | 56 inline unsigned CPURegister::code() const { |
| 57 ASSERT(IsValid()); | 57 ASSERT(IsValid()); |
| (...skipping 44 matching lines...) |
| 102 return true; | 102 return true; |
| 103 } else { | 103 } else { |
| 104 ASSERT(IsNone()); | 104 ASSERT(IsNone()); |
| 105 return false; | 105 return false; |
| 106 } | 106 } |
| 107 } | 107 } |
| 108 | 108 |
| 109 | 109 |
| 110 inline bool CPURegister::IsValidRegister() const { | 110 inline bool CPURegister::IsValidRegister() const { |
| 111 return IsRegister() && | 111 return IsRegister() && |
| 112 ((reg_size == kWRegSize) || (reg_size == kXRegSize)) && | 112 ((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)) && |
| 113 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); | 113 ((reg_code < kNumberOfRegisters) || (reg_code == kSPRegInternalCode)); |
| 114 } | 114 } |
| 115 | 115 |
| 116 | 116 |
| 117 inline bool CPURegister::IsValidFPRegister() const { | 117 inline bool CPURegister::IsValidFPRegister() const { |
| 118 return IsFPRegister() && | 118 return IsFPRegister() && |
| 119 ((reg_size == kSRegSize) || (reg_size == kDRegSize)) && | 119 ((reg_size == kSRegSizeInBits) || (reg_size == kDRegSizeInBits)) && |
| 120 (reg_code < kNumberOfFPRegisters); | 120 (reg_code < kNumberOfFPRegisters); |
| 121 } | 121 } |
| 122 | 122 |
| 123 | 123 |
| 124 inline bool CPURegister::IsNone() const { | 124 inline bool CPURegister::IsNone() const { |
| 125 // kNoRegister types should always have size 0 and code 0. | 125 // kNoRegister types should always have size 0 and code 0. |
| 126 ASSERT((reg_type != kNoRegister) || (reg_code == 0)); | 126 ASSERT((reg_type != kNoRegister) || (reg_code == 0)); |
| 127 ASSERT((reg_type != kNoRegister) || (reg_size == 0)); | 127 ASSERT((reg_type != kNoRegister) || (reg_size == 0)); |
| 128 | 128 |
| 129 return reg_type == kNoRegister; | 129 return reg_type == kNoRegister; |
| (...skipping 42 matching lines...) |
| 172 inline void CPURegList::Combine(const CPURegList& other) { | 172 inline void CPURegList::Combine(const CPURegList& other) { |
| 173 ASSERT(IsValid()); | 173 ASSERT(IsValid()); |
| 174 ASSERT(other.type() == type_); | 174 ASSERT(other.type() == type_); |
| 175 ASSERT(other.RegisterSizeInBits() == size_); | 175 ASSERT(other.RegisterSizeInBits() == size_); |
| 176 list_ |= other.list(); | 176 list_ |= other.list(); |
| 177 } | 177 } |
| 178 | 178 |
| 179 | 179 |
| 180 inline void CPURegList::Remove(const CPURegList& other) { | 180 inline void CPURegList::Remove(const CPURegList& other) { |
| 181 ASSERT(IsValid()); | 181 ASSERT(IsValid()); |
| 182 ASSERT(other.type() == type_); | 182 if (other.type() == type_) { |
| 183 ASSERT(other.RegisterSizeInBits() == size_); | 183 list_ &= ~other.list(); |
| 184 list_ &= ~other.list(); | 184 } |
| 185 } | 185 } |
| 186 | 186 |
| 187 | 187 |
| 188 inline void CPURegList::Combine(const CPURegister& other) { | 188 inline void CPURegList::Combine(const CPURegister& other) { |
| 189 ASSERT(other.type() == type_); | 189 ASSERT(other.type() == type_); |
| 190 ASSERT(other.SizeInBits() == size_); | 190 ASSERT(other.SizeInBits() == size_); |
| 191 Combine(other.code()); | 191 Combine(other.code()); |
| 192 } | 192 } |
| 193 | 193 |
| 194 | 194 |
| 195 inline void CPURegList::Remove(const CPURegister& other) { | 195 inline void CPURegList::Remove(const CPURegister& other1, |
| 196 ASSERT(other.type() == type_); | 196 const CPURegister& other2, |
| 197 ASSERT(other.SizeInBits() == size_); | 197 const CPURegister& other3, |
| 198 Remove(other.code()); | 198 const CPURegister& other4) { |
| 199 if (!other1.IsNone() && (other1.type() == type_)) Remove(other1.code()); |
| 200 if (!other2.IsNone() && (other2.type() == type_)) Remove(other2.code()); |
| 201 if (!other3.IsNone() && (other3.type() == type_)) Remove(other3.code()); |
| 202 if (!other4.IsNone() && (other4.type() == type_)) Remove(other4.code()); |
| 199 } | 203 } |
| 200 | 204 |
| 201 | 205 |
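Note that the four-argument Remove above skips arguments that are NoneType or of a different register type rather than asserting, so call sites can pass anywhere from one to four registers. A hypothetical call site, assuming the trailing parameters default to NoReg in the matching declaration and the range constructor seen elsewhere in this header:

```cpp
// Hypothetical call site: drop the two scratch registers from a list of
// X registers in one call. Arguments beyond the first are assumed to
// default to NoReg, which the IsNone() guards above skip over.
CPURegList saved(CPURegister::kRegister, kXRegSizeInBits, 0, 28);
saved.Remove(x16, x17);  // ip0 and ip1 are clobbered, so drop them
```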
| 202 inline void CPURegList::Combine(int code) { | 206 inline void CPURegList::Combine(int code) { |
| 203 ASSERT(IsValid()); | 207 ASSERT(IsValid()); |
| 204 ASSERT(CPURegister::Create(code, size_, type_).IsValid()); | 208 ASSERT(CPURegister::Create(code, size_, type_).IsValid()); |
| 205 list_ |= (1UL << code); | 209 list_ |= (1UL << code); |
| 206 } | 210 } |
| 207 | 211 |
| 208 | 212 |
| 209 inline void CPURegList::Remove(int code) { | 213 inline void CPURegList::Remove(int code) { |
| 210 ASSERT(IsValid()); | 214 ASSERT(IsValid()); |
| 211 ASSERT(CPURegister::Create(code, size_, type_).IsValid()); | 215 ASSERT(CPURegister::Create(code, size_, type_).IsValid()); |
| 212 list_ &= ~(1UL << code); | 216 list_ &= ~(1UL << code); |
| 213 } | 217 } |
| 214 | 218 |
| 215 | 219 |
| 216 inline Register Register::XRegFromCode(unsigned code) { | 220 inline Register Register::XRegFromCode(unsigned code) { |
| 217 // This function returns the zero register when code = 31. The stack pointer | 221 // This function returns the zero register when code = 31. The stack pointer |
| 218 // cannot be returned. | 222 // cannot be returned. |
| 219 ASSERT(code < kNumberOfRegisters); | 223 ASSERT(code < kNumberOfRegisters); |
| 220 return Register::Create(code, kXRegSize); | 224 return Register::Create(code, kXRegSizeInBits); |
| 221 } | 225 } |
| 222 | 226 |
| 223 | 227 |
| 224 inline Register Register::WRegFromCode(unsigned code) { | 228 inline Register Register::WRegFromCode(unsigned code) { |
| 225 ASSERT(code < kNumberOfRegisters); | 229 ASSERT(code < kNumberOfRegisters); |
| 226 return Register::Create(code, kWRegSize); | 230 return Register::Create(code, kWRegSizeInBits); |
| 227 } | 231 } |
| 228 | 232 |
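Worth spelling out the asymmetry the comment above alludes to: on A64, encoding 31 names the zero register in most operand positions and the stack pointer in only a few, so these helpers always produce the zero register and csp stays distinguishable via the out-of-band kSPRegInternalCode. A sketch of the resulting call sites:

```cpp
// Code 31 through these helpers always means the zero register; the stack
// pointer is never constructed from an architectural register code.
Register zero64 = Register::XRegFromCode(31);  // xzr
Register zero32 = Register::WRegFromCode(31);  // wzr
```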
| 229 | 233 |
| 230 inline FPRegister FPRegister::SRegFromCode(unsigned code) { | 234 inline FPRegister FPRegister::SRegFromCode(unsigned code) { |
| 231 ASSERT(code < kNumberOfFPRegisters); | 235 ASSERT(code < kNumberOfFPRegisters); |
| 232 return FPRegister::Create(code, kSRegSize); | 236 return FPRegister::Create(code, kSRegSizeInBits); |
| 233 } | 237 } |
| 234 | 238 |
| 235 | 239 |
| 236 inline FPRegister FPRegister::DRegFromCode(unsigned code) { | 240 inline FPRegister FPRegister::DRegFromCode(unsigned code) { |
| 237 ASSERT(code < kNumberOfFPRegisters); | 241 ASSERT(code < kNumberOfFPRegisters); |
| 238 return FPRegister::Create(code, kDRegSize); | 242 return FPRegister::Create(code, kDRegSizeInBits); |
| 239 } | 243 } |
| 240 | 244 |
| 241 | 245 |
| 242 inline Register CPURegister::W() const { | 246 inline Register CPURegister::W() const { |
| 243 ASSERT(IsValidRegister()); | 247 ASSERT(IsValidRegister()); |
| 244 return Register::WRegFromCode(reg_code); | 248 return Register::WRegFromCode(reg_code); |
| 245 } | 249 } |
| 246 | 250 |
| 247 | 251 |
| 248 inline Register CPURegister::X() const { | 252 inline Register CPURegister::X() const { |
| (...skipping 74 matching lines...) |
| 323 STATIC_ASSERT(OperandInitializer<T>::kIsIntType); | 327 STATIC_ASSERT(OperandInitializer<T>::kIsIntType); |
| 324 } | 328 } |
| 325 | 329 |
| 326 | 330 |
| 327 Operand::Operand(Register reg, Shift shift, unsigned shift_amount) | 331 Operand::Operand(Register reg, Shift shift, unsigned shift_amount) |
| 328 : reg_(reg), | 332 : reg_(reg), |
| 329 shift_(shift), | 333 shift_(shift), |
| 330 extend_(NO_EXTEND), | 334 extend_(NO_EXTEND), |
| 331 shift_amount_(shift_amount), | 335 shift_amount_(shift_amount), |
| 332 rmode_(reg.Is64Bits() ? RelocInfo::NONE64 : RelocInfo::NONE32) { | 336 rmode_(reg.Is64Bits() ? RelocInfo::NONE64 : RelocInfo::NONE32) { |
| 333 ASSERT(reg.Is64Bits() || (shift_amount < kWRegSize)); | 337 ASSERT(reg.Is64Bits() || (shift_amount < kWRegSizeInBits)); |
| 334 ASSERT(reg.Is32Bits() || (shift_amount < kXRegSize)); | 338 ASSERT(reg.Is32Bits() || (shift_amount < kXRegSizeInBits)); |
| 335 ASSERT(!reg.IsSP()); | 339 ASSERT(!reg.IsSP()); |
| 336 } | 340 } |
| 337 | 341 |
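Hypothetical call sites for the shifted-register constructor, showing the width-dependent bound that the first two ASSERTs enforce:

```cpp
// The shift amount must be a valid bit position within the register width:
// below kXRegSizeInBits (64) for X registers, kWRegSizeInBits (32) for W.
Operand scaled_index(x1, LSL, 4);  // x1 << 4
Operand high_half(w2, LSR, 16);    // w2 >> 16
```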
| 338 | 342 |
| 339 Operand::Operand(Register reg, Extend extend, unsigned shift_amount) | 343 Operand::Operand(Register reg, Extend extend, unsigned shift_amount) |
| 340 : reg_(reg), | 344 : reg_(reg), |
| 341 shift_(NO_SHIFT), | 345 shift_(NO_SHIFT), |
| 342 extend_(extend), | 346 extend_(extend), |
| 343 shift_amount_(shift_amount), | 347 shift_amount_(shift_amount), |
| 344 rmode_(reg.Is64Bits() ? RelocInfo::NONE64 : RelocInfo::NONE32) { | 348 rmode_(reg.Is64Bits() ? RelocInfo::NONE64 : RelocInfo::NONE32) { |
| (...skipping 202 matching lines...) |
| 547 | 551 |
| 548 | 552 |
| 549 Address Assembler::target_pointer_address_at(Address pc) { | 553 Address Assembler::target_pointer_address_at(Address pc) { |
| 550 Instruction* instr = reinterpret_cast<Instruction*>(pc); | 554 Instruction* instr = reinterpret_cast<Instruction*>(pc); |
| 551 ASSERT(instr->IsLdrLiteralX()); | 555 ASSERT(instr->IsLdrLiteralX()); |
| 552 return reinterpret_cast<Address>(instr->ImmPCOffsetTarget()); | 556 return reinterpret_cast<Address>(instr->ImmPCOffsetTarget()); |
| 553 } | 557 } |
| 554 | 558 |
| 555 | 559 |
| 556 // Read/Modify the code target address in the branch/call instruction at pc. | 560 // Read/Modify the code target address in the branch/call instruction at pc. |
| 557 Address Assembler::target_address_at(Address pc) { | 561 Address Assembler::target_address_at(Address pc, |
| 562 ConstantPoolArray* constant_pool) { |
| 558 return Memory::Address_at(target_pointer_address_at(pc)); | 563 return Memory::Address_at(target_pointer_address_at(pc)); |
| 559 } | 564 } |
| 560 | 565 |
| 561 | 566 |
| 567 Address Assembler::target_address_at(Address pc, Code* code) { |
| 568 ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; |
| 569 return target_address_at(pc, constant_pool); |
| 570 } |
| 571 |
| 572 |
| 562 Address Assembler::target_address_from_return_address(Address pc) { | 573 Address Assembler::target_address_from_return_address(Address pc) { |
| 563 // Returns the address of the call target, given the return address that | 574 // Returns the address of the call target, given the return address that |
| 564 // execution resumes at after the call. | 575 // execution resumes at after the call. |
| 565 // Call sequence on A64 is: | 576 // Call sequence on A64 is: |
| 566 // ldr ip0, #... @ load from literal pool | 577 // ldr ip0, #... @ load from literal pool |
| 567 // blr ip0 | 578 // blr ip0 |
| 568 Address candidate = pc - 2 * kInstructionSize; | 579 Address candidate = pc - 2 * kInstructionSize; |
| 569 Instruction* instr = reinterpret_cast<Instruction*>(candidate); | 580 Instruction* instr = reinterpret_cast<Instruction*>(candidate); |
| 570 USE(instr); | 581 USE(instr); |
| 571 ASSERT(instr->IsLdrLiteralX()); | 582 ASSERT(instr->IsLdrLiteralX()); |
| (...skipping 33 matching lines...) |
| 605 } else { | 616 } else { |
| 606 // Verify the instruction sequence. | 617 // Verify the instruction sequence. |
| 607 ASSERT(instr->IsLdrLiteralX()); | 618 ASSERT(instr->IsLdrLiteralX()); |
| 608 ASSERT(instr->following(1)->IsBranchAndLinkToRegister()); | 619 ASSERT(instr->following(1)->IsBranchAndLinkToRegister()); |
| 609 return pc + Assembler::kCallSizeWithRelocation; | 620 return pc + Assembler::kCallSizeWithRelocation; |
| 610 } | 621 } |
| 611 } | 622 } |
| 612 | 623 |
| 613 | 624 |
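Both lookups above reduce to fixed-offset arithmetic because every A64 instruction is four bytes and the call sequence has a known shape. A standalone sketch (not V8 code) of the return-address computation:

```cpp
#include <stdint.h>

const int kA64InstructionSize = 4;  // every A64 instruction is 32 bits wide

// The return address points just past "blr ip0", so the "ldr ip0" that
// materialised the call target sits exactly two instructions earlier.
uintptr_t CallSiteFromReturnAddress(uintptr_t return_address) {
  return return_address - 2 * kA64InstructionSize;
}
```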
| 614 void Assembler::deserialization_set_special_target_at( | 625 void Assembler::deserialization_set_special_target_at( |
| 615 Address constant_pool_entry, Address target) { | 626 Address constant_pool_entry, Code* code, Address target) { |
| 616 Memory::Address_at(constant_pool_entry) = target; | 627 Memory::Address_at(constant_pool_entry) = target; |
| 617 } | 628 } |
| 618 | 629 |
| 619 | 630 |
| 620 void Assembler::set_target_address_at(Address pc, Address target) { | 631 void Assembler::set_target_address_at(Address pc, |
| 632 ConstantPoolArray* constant_pool, |
| 633 Address target) { |
| 621 Memory::Address_at(target_pointer_address_at(pc)) = target; | 634 Memory::Address_at(target_pointer_address_at(pc)) = target; |
| 622 // Intuitively, we would think it is necessary to always flush the | 635 // Intuitively, we would think it is necessary to always flush the |
| 623 // instruction cache after patching a target address in the code as follows: | 636 // instruction cache after patching a target address in the code as follows: |
| 624 // CPU::FlushICache(pc, sizeof(target)); | 637 // CPU::FlushICache(pc, sizeof(target)); |
| 625 // However, on ARM, no instruction is actually patched in the case of | 638 // However, on ARM, no instruction is actually patched in the case of |
| 626 // embedded constants of the form: | 639 // embedded constants of the form: |
| 627 // ldr ip, [pc, #...] | 640 // ldr ip, [pc, #...] |
| 628 // Since the instruction accessing this address in the constant pool remains | 641 // Since the instruction accessing this address in the constant pool remains |
| 629 // unchanged, a flush is not required. | 642 // unchanged, a flush is not required. |
| 630 } | 643 } |
| 631 | 644 |
| 632 | 645 |
| 646 void Assembler::set_target_address_at(Address pc, |
| 647 Code* code, |
| 648 Address target) { |
| 649 ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; |
| 650 set_target_address_at(pc, constant_pool, target); |
| 651 } |
| 652 |
| 653 |
| 633 int RelocInfo::target_address_size() { | 654 int RelocInfo::target_address_size() { |
| 634 return kPointerSize; | 655 return kPointerSize; |
| 635 } | 656 } |
| 636 | 657 |
| 637 | 658 |
| 638 Address RelocInfo::target_address() { | 659 Address RelocInfo::target_address() { |
| 639 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)); | 660 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)); |
| 640 return Assembler::target_address_at(pc_); | 661 return Assembler::target_address_at(pc_, host_); |
| 641 } | 662 } |
| 642 | 663 |
| 643 | 664 |
| 644 Address RelocInfo::target_address_address() { | 665 Address RelocInfo::target_address_address() { |
| 645 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) | 666 ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_) |
| 646 || rmode_ == EMBEDDED_OBJECT | 667 || rmode_ == EMBEDDED_OBJECT |
| 647 || rmode_ == EXTERNAL_REFERENCE); | 668 || rmode_ == EXTERNAL_REFERENCE); |
| 648 return Assembler::target_pointer_address_at(pc_); | 669 return Assembler::target_pointer_address_at(pc_); |
| 649 } | 670 } |
| 650 | 671 |
| 651 | 672 |
| 673 Address RelocInfo::constant_pool_entry_address() { |
| 674 ASSERT(IsInConstantPool()); |
| 675 return Assembler::target_pointer_address_at(pc_); |
| 676 } |
| 677 |
| 678 |
| 652 Object* RelocInfo::target_object() { | 679 Object* RelocInfo::target_object() { |
| 653 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); | 680 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); |
| 654 return reinterpret_cast<Object*>(Assembler::target_address_at(pc_)); | 681 return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_)); |
| 655 } | 682 } |
| 656 | 683 |
| 657 | 684 |
| 658 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) { | 685 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) { |
| 659 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); | 686 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); |
| 660 return Handle<Object>(reinterpret_cast<Object**>( | 687 return Handle<Object>(reinterpret_cast<Object**>( |
| 661 Assembler::target_address_at(pc_))); | 688 Assembler::target_address_at(pc_, host_))); |
| 662 } | 689 } |
| 663 | 690 |
| 664 | 691 |
| 665 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) { | 692 void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) { |
| 666 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); | 693 ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT); |
| 667 ASSERT(!target->IsConsString()); | 694 ASSERT(!target->IsConsString()); |
| 668 Assembler::set_target_address_at(pc_, reinterpret_cast<Address>(target)); | 695 Assembler::set_target_address_at(pc_, host_, |
| 696 reinterpret_cast<Address>(target)); |
| 669 if (mode == UPDATE_WRITE_BARRIER && | 697 if (mode == UPDATE_WRITE_BARRIER && |
| 670 host() != NULL && | 698 host() != NULL && |
| 671 target->IsHeapObject()) { | 699 target->IsHeapObject()) { |
| 672 host()->GetHeap()->incremental_marking()->RecordWrite( | 700 host()->GetHeap()->incremental_marking()->RecordWrite( |
| 673 host(), &Memory::Object_at(pc_), HeapObject::cast(target)); | 701 host(), &Memory::Object_at(pc_), HeapObject::cast(target)); |
| 674 } | 702 } |
| 675 } | 703 } |
| 676 | 704 |
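The barrier logic above follows the usual incremental-marking contract: after a reference is written into code, the marker must learn about the new edge or a concurrent marking cycle could miss it. A distilled sketch with hypothetical names:

```cpp
// Hypothetical distillation of the write-barrier pattern used above: do the
// raw store first, then report the new host->value edge to the marker.
void StoreWithBarrier(Object** slot, Object* value, Code* host) {
  *slot = value;
  if (host != NULL && IsHeapObject(value)) {
    RecordWriteIntoCode(host, slot, value);  // keeps incremental marking sound
  }
}
```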
| 677 | 705 |
| 678 Address RelocInfo::target_reference() { | 706 Address RelocInfo::target_reference() { |
| 679 ASSERT(rmode_ == EXTERNAL_REFERENCE); | 707 ASSERT(rmode_ == EXTERNAL_REFERENCE); |
| 680 return Assembler::target_address_at(pc_); | 708 return Assembler::target_address_at(pc_, host_); |
| 681 } | 709 } |
| 682 | 710 |
| 683 | 711 |
| 684 Address RelocInfo::target_runtime_entry(Assembler* origin) { | 712 Address RelocInfo::target_runtime_entry(Assembler* origin) { |
| 685 ASSERT(IsRuntimeEntry(rmode_)); | 713 ASSERT(IsRuntimeEntry(rmode_)); |
| 686 return target_address(); | 714 return target_address(); |
| 687 } | 715 } |
| 688 | 716 |
| 689 | 717 |
| 690 void RelocInfo::set_target_runtime_entry(Address target, | 718 void RelocInfo::set_target_runtime_entry(Address target, |
| (...skipping 48 matching lines...) |
| 739 Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset; | 767 Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset; |
| 740 Memory::Address_at(stub_entry_address) = stub->instruction_start(); | 768 Memory::Address_at(stub_entry_address) = stub->instruction_start(); |
| 741 } | 769 } |
| 742 | 770 |
| 743 | 771 |
| 744 Address RelocInfo::call_address() { | 772 Address RelocInfo::call_address() { |
| 745 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) || | 773 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) || |
| 746 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence())); | 774 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence())); |
| 747 // For the above sequences the RelocInfo points to the literal load that | 775 // For the above sequences the RelocInfo points to the literal load that |
| 748 // loads the call address. | 776 // loads the call address. |
| 749 return Assembler::target_address_at(pc_); | 777 return Assembler::target_address_at(pc_, host_); |
| 750 } | 778 } |
| 751 | 779 |
| 752 | 780 |
| 753 void RelocInfo::set_call_address(Address target) { | 781 void RelocInfo::set_call_address(Address target) { |
| 754 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) || | 782 ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) || |
| 755 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence())); | 783 (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence())); |
| 756 Assembler::set_target_address_at(pc_, target); | 784 Assembler::set_target_address_at(pc_, host_, target); |
| 757 if (host() != NULL) { | 785 if (host() != NULL) { |
| 758 Object* target_code = Code::GetCodeFromTargetAddress(target); | 786 Object* target_code = Code::GetCodeFromTargetAddress(target); |
| 759 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( | 787 host()->GetHeap()->incremental_marking()->RecordWriteIntoCode( |
| 760 host(), this, HeapObject::cast(target_code)); | 788 host(), this, HeapObject::cast(target_code)); |
| 761 } | 789 } |
| 762 } | 790 } |
| 763 | 791 |
| 764 | 792 |
| 765 void RelocInfo::WipeOut() { | 793 void RelocInfo::WipeOut() { |
| 766 ASSERT(IsEmbeddedObject(rmode_) || | 794 ASSERT(IsEmbeddedObject(rmode_) || |
| 767 IsCodeTarget(rmode_) || | 795 IsCodeTarget(rmode_) || |
| 768 IsRuntimeEntry(rmode_) || | 796 IsRuntimeEntry(rmode_) || |
| 769 IsExternalReference(rmode_)); | 797 IsExternalReference(rmode_)); |
| 770 Assembler::set_target_address_at(pc_, NULL); | 798 Assembler::set_target_address_at(pc_, host_, NULL); |
| 771 } | 799 } |
| 772 | 800 |
| 773 | 801 |
| 774 bool RelocInfo::IsPatchedReturnSequence() { | 802 bool RelocInfo::IsPatchedReturnSequence() { |
| 775 // The sequence must be: | 803 // The sequence must be: |
| 776 // ldr ip0, [pc, #offset] | 804 // ldr ip0, [pc, #offset] |
| 777 // blr ip0 | 805 // blr ip0 |
| 778 // See a64/debug-a64.cc BreakLocationIterator::SetDebugBreakAtReturn(). | 806 // See a64/debug-a64.cc BreakLocationIterator::SetDebugBreakAtReturn(). |
| 779 Instruction* i1 = reinterpret_cast<Instruction*>(pc_); | 807 Instruction* i1 = reinterpret_cast<Instruction*>(pc_); |
| 780 Instruction* i2 = i1->following(); | 808 Instruction* i2 = i1->following(); |
| (...skipping 208 matching lines...) |
| 989 ASSERT(IsImmAddSub(imm)); | 1017 ASSERT(IsImmAddSub(imm)); |
| 990 if (is_uint12(imm)) { // No shift required. | 1018 if (is_uint12(imm)) { // No shift required. |
| 991 return imm << ImmAddSub_offset; | 1019 return imm << ImmAddSub_offset; |
| 992 } else { | 1020 } else { |
| 993 return ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset); | 1021 return ((imm >> 12) << ImmAddSub_offset) | (1 << ShiftAddSub_offset); |
| 994 } | 1022 } |
| 995 } | 1023 } |
| 996 | 1024 |
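The two branches above correspond to the two encodable immediate shapes for add/sub instructions. A standalone sketch of the predicate this relies on (IsImmAddSub is assumed to implement the same rule):

```cpp
#include <assert.h>
#include <stdint.h>

// An add/sub immediate is encodable if it fits in 12 bits directly, or in
// 12 bits after dropping twelve trailing zero bits (encoded as "LSL #12").
bool IsEncodableAddSubImmediate(uint64_t imm) {
  return ((imm >> 12) == 0) ||
         (((imm & 0xfff) == 0) && ((imm >> 24) == 0));
}

int main() {
  assert(IsEncodableAddSubImmediate(0x123));      // direct 12-bit form
  assert(IsEncodableAddSubImmediate(0x123000));   // shifted form, LSL #12
  assert(!IsEncodableAddSubImmediate(0x123456));  // needs a scratch register
  return 0;
}
```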
| 997 | 1025 |
| 998 Instr Assembler::ImmS(unsigned imms, unsigned reg_size) { | 1026 Instr Assembler::ImmS(unsigned imms, unsigned reg_size) { |
| 999 ASSERT(((reg_size == kXRegSize) && is_uint6(imms)) || | 1027 ASSERT(((reg_size == kXRegSizeInBits) && is_uint6(imms)) || |
| 1000 ((reg_size == kWRegSize) && is_uint5(imms))); | 1028 ((reg_size == kWRegSizeInBits) && is_uint5(imms))); |
| 1001 USE(reg_size); | 1029 USE(reg_size); |
| 1002 return imms << ImmS_offset; | 1030 return imms << ImmS_offset; |
| 1003 } | 1031 } |
| 1004 | 1032 |
| 1005 | 1033 |
| 1006 Instr Assembler::ImmR(unsigned immr, unsigned reg_size) { | 1034 Instr Assembler::ImmR(unsigned immr, unsigned reg_size) { |
| 1007 ASSERT(((reg_size == kXRegSize) && is_uint6(immr)) || | 1035 ASSERT(((reg_size == kXRegSizeInBits) && is_uint6(immr)) || |
| 1008 ((reg_size == kWRegSize) && is_uint5(immr))); | 1036 ((reg_size == kWRegSizeInBits) && is_uint5(immr))); |
| 1009 USE(reg_size); | 1037 USE(reg_size); |
| 1010 ASSERT(is_uint6(immr)); | 1038 ASSERT(is_uint6(immr)); |
| 1011 return immr << ImmR_offset; | 1039 return immr << ImmR_offset; |
| 1012 } | 1040 } |
| 1013 | 1041 |
| 1014 | 1042 |
| 1015 Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) { | 1043 Instr Assembler::ImmSetBits(unsigned imms, unsigned reg_size) { |
| 1016 ASSERT((reg_size == kWRegSize) || (reg_size == kXRegSize)); | 1044 ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)); |
| 1017 ASSERT(is_uint6(imms)); | 1045 ASSERT(is_uint6(imms)); |
| 1018 ASSERT((reg_size == kXRegSize) || is_uint6(imms + 3)); | 1046 ASSERT((reg_size == kXRegSizeInBits) || is_uint6(imms + 3)); |
| 1019 USE(reg_size); | 1047 USE(reg_size); |
| 1020 return imms << ImmSetBits_offset; | 1048 return imms << ImmSetBits_offset; |
| 1021 } | 1049 } |
| 1022 | 1050 |
| 1023 | 1051 |
| 1024 Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) { | 1052 Instr Assembler::ImmRotate(unsigned immr, unsigned reg_size) { |
| 1025 ASSERT((reg_size == kWRegSize) || (reg_size == kXRegSize)); | 1053 ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)); |
| 1026 ASSERT(((reg_size == kXRegSize) && is_uint6(immr)) || | 1054 ASSERT(((reg_size == kXRegSizeInBits) && is_uint6(immr)) || |
| 1027 ((reg_size == kWRegSize) && is_uint5(immr))); | 1055 ((reg_size == kWRegSizeInBits) && is_uint5(immr))); |
| 1028 USE(reg_size); | 1056 USE(reg_size); |
| 1029 return immr << ImmRotate_offset; | 1057 return immr << ImmRotate_offset; |
| 1030 } | 1058 } |
| 1031 | 1059 |
| 1032 | 1060 |
| 1033 Instr Assembler::ImmLLiteral(int imm19) { | 1061 Instr Assembler::ImmLLiteral(int imm19) { |
| 1034 CHECK(is_int19(imm19)); | 1062 CHECK(is_int19(imm19)); |
| 1035 return truncate_to_int19(imm19) << ImmLLiteral_offset; | 1063 return truncate_to_int19(imm19) << ImmLLiteral_offset; |
| 1036 } | 1064 } |
| 1037 | 1065 |
| 1038 | 1066 |
| 1039 Instr Assembler::BitN(unsigned bitn, unsigned reg_size) { | 1067 Instr Assembler::BitN(unsigned bitn, unsigned reg_size) { |
| 1040 ASSERT((reg_size == kWRegSize) || (reg_size == kXRegSize)); | 1068 ASSERT((reg_size == kWRegSizeInBits) || (reg_size == kXRegSizeInBits)); |
| 1041 ASSERT((reg_size == kXRegSize) || (bitn == 0)); | 1069 ASSERT((reg_size == kXRegSizeInBits) || (bitn == 0)); |
| 1042 USE(reg_size); | 1070 USE(reg_size); |
| 1043 return bitn << BitN_offset; | 1071 return bitn << BitN_offset; |
| 1044 } | 1072 } |
| 1045 | 1073 |
| 1046 | 1074 |
| 1047 Instr Assembler::ShiftDP(Shift shift) { | 1075 Instr Assembler::ShiftDP(Shift shift) { |
| 1048 ASSERT(shift == LSL || shift == LSR || shift == ASR || shift == ROR); | 1076 ASSERT(shift == LSL || shift == LSR || shift == ASR || shift == ROR); |
| 1049 return shift << ShiftDP_offset; | 1077 return shift << ShiftDP_offset; |
| 1050 } | 1078 } |
| 1051 | 1079 |
| (...skipping 119 matching lines...) |
| 1171 void Assembler::LoadRelocated(const CPURegister& rt, const Operand& operand) { | 1199 void Assembler::LoadRelocated(const CPURegister& rt, const Operand& operand) { |
| 1172 LoadRelocatedValue(rt, operand, LDR_x_lit); | 1200 LoadRelocatedValue(rt, operand, LDR_x_lit); |
| 1173 } | 1201 } |
| 1174 | 1202 |
| 1175 | 1203 |
| 1176 inline void Assembler::CheckBuffer() { | 1204 inline void Assembler::CheckBuffer() { |
| 1177 ASSERT(pc_ < (buffer_ + buffer_size_)); | 1205 ASSERT(pc_ < (buffer_ + buffer_size_)); |
| 1178 if (buffer_space() < kGap) { | 1206 if (buffer_space() < kGap) { |
| 1179 GrowBuffer(); | 1207 GrowBuffer(); |
| 1180 } | 1208 } |
| 1181 if (pc_offset() >= next_buffer_check_) { | 1209 if (pc_offset() >= next_veneer_pool_check_) { |
| 1210 CheckVeneerPool(true); |
| 1211 } |
| 1212 if (pc_offset() >= next_constant_pool_check_) { |
| 1182 CheckConstPool(false, true); | 1213 CheckConstPool(false, true); |
| 1183 } | 1214 } |
| 1184 } | 1215 } |
| 1185 | 1216 |
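The NEW side splits the single next_buffer_check_ watermark into one per pool (veneers and constants). The shape of that pattern in isolation, with hypothetical names:

```cpp
// Hypothetical distillation: each pool records the pc offset at which it
// next needs attention, so the common case per emitted instruction is one
// comparison rather than a full pool inspection.
struct PoolWatermark {
  int next_check_offset;
  int check_interval;

  void MaybeCheck(int pc_offset) {
    if (pc_offset >= next_check_offset) {
      // ...inspect the pool here and emit it if it is due...
      next_check_offset = pc_offset + check_interval;
    }
  }
};
```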
| 1186 | 1217 |
| 1187 TypeFeedbackId Assembler::RecordedAstId() { | 1218 TypeFeedbackId Assembler::RecordedAstId() { |
| 1188 ASSERT(!recorded_ast_id_.IsNone()); | 1219 ASSERT(!recorded_ast_id_.IsNone()); |
| 1189 return recorded_ast_id_; | 1220 return recorded_ast_id_; |
| 1190 } | 1221 } |
| 1191 | 1222 |
| 1192 | 1223 |
| 1193 void Assembler::ClearRecordedAstId() { | 1224 void Assembler::ClearRecordedAstId() { |
| 1194 recorded_ast_id_ = TypeFeedbackId::None(); | 1225 recorded_ast_id_ = TypeFeedbackId::None(); |
| 1195 } | 1226 } |
| 1196 | 1227 |
| 1197 | 1228 |
| 1198 } } // namespace v8::internal | 1229 } } // namespace v8::internal |
| 1199 | 1230 |
| 1200 #endif // V8_A64_ASSEMBLER_A64_INL_H_ | 1231 #endif // V8_A64_ASSEMBLER_A64_INL_H_ |