OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
11 // with the distribution. | 11 // with the distribution. |
12 // * Neither the name of Google Inc. nor the names of its | 12 // * Neither the name of Google Inc. nor the names of its |
13 // contributors may be used to endorse or promote products derived | 13 // contributors may be used to endorse or promote products derived |
14 // from this software without specific prior written permission. | 14 // from this software without specific prior written permission. |
15 // | 15 // |
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT |
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, |
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT |
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, |
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY |
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT |
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE |
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
27 | 27 |
28 #ifndef V8_A64_ASSEMBLER_A64_INL_H_ | 28 #ifndef V8_ARM64_ASSEMBLER_ARM64_INL_H_ |
29 #define V8_A64_ASSEMBLER_A64_INL_H_ | 29 #define V8_ARM64_ASSEMBLER_ARM64_INL_H_ |
30 | 30 |
31 #include "a64/assembler-a64.h" | 31 #include "arm64/assembler-arm64.h" |
32 #include "cpu.h" | 32 #include "cpu.h" |
33 #include "debug.h" | 33 #include "debug.h" |
34 | 34 |
35 | 35 |
36 namespace v8 { | 36 namespace v8 { |
37 namespace internal { | 37 namespace internal { |
38 | 38 |
39 | 39 |
40 void RelocInfo::apply(intptr_t delta) { | 40 void RelocInfo::apply(intptr_t delta) { |
41 UNIMPLEMENTED(); | 41 UNIMPLEMENTED(); |
(...skipping 524 matching lines...)
566 | 566 |
567 Address Assembler::target_address_at(Address pc, Code* code) { | 567 Address Assembler::target_address_at(Address pc, Code* code) { |
568 ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; | 568 ConstantPoolArray* constant_pool = code ? code->constant_pool() : NULL; |
569 return target_address_at(pc, constant_pool); | 569 return target_address_at(pc, constant_pool); |
570 } | 570 } |
571 | 571 |
572 | 572 |
573 Address Assembler::target_address_from_return_address(Address pc) { | 573 Address Assembler::target_address_from_return_address(Address pc) { |
574 // Returns the address of the call target from the return address that will | 574 // Returns the address of the call target from the return address that will |
575 // be returned to after a call. | 575 // be returned to after a call. |
576 // Call sequence on A64 is: | 576 // Call sequence on ARM64 is: |
577 // ldr ip0, #... @ load from literal pool | 577 // ldr ip0, #... @ load from literal pool |
578 // blr ip0 | 578 // blr ip0 |
579 Address candidate = pc - 2 * kInstructionSize; | 579 Address candidate = pc - 2 * kInstructionSize; |
580 Instruction* instr = reinterpret_cast<Instruction*>(candidate); | 580 Instruction* instr = reinterpret_cast<Instruction*>(candidate); |
581 USE(instr); | 581 USE(instr); |
582 ASSERT(instr->IsLdrLiteralX()); | 582 ASSERT(instr->IsLdrLiteralX()); |
583 return candidate; | 583 return candidate; |
584 } | 584 } |
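[Reviewer sketch, not part of this CL: a minimal standalone illustration of the return-address arithmetic the comment above describes. It assumes the fixed two-instruction call sequence and 4-byte ARM64 instructions; the addresses are hypothetical.]

// Standalone sketch (not part of the CL): why the ldr that loads the call
// target is found two instructions before the return address.
#include <cstdint>
#include <cstdio>

int main() {
  const uint64_t kInstructionSize = 4;  // every ARM64 instruction is 4 bytes
  uint64_t return_address = 0x1008;     // hypothetical pc returned to after blr
  // Layout emitted for the call:
  //   0x1000: ldr ip0, #...   <- loads the call target from the literal pool
  //   0x1004: blr ip0
  //   0x1008: <return address>
  uint64_t ldr_address = return_address - 2 * kInstructionSize;
  std::printf("ldr at 0x%llx\n", (unsigned long long)ldr_address);  // 0x1000
  return 0;
}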
585 | 585 |
586 | 586 |
(...skipping 151 matching lines...)
738 void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) { | 738 void RelocInfo::set_target_cell(Cell* cell, WriteBarrierMode mode) { |
739 UNIMPLEMENTED(); | 739 UNIMPLEMENTED(); |
740 } | 740 } |
741 | 741 |
742 | 742 |
743 static const int kCodeAgeSequenceSize = 5 * kInstructionSize; | 743 static const int kCodeAgeSequenceSize = 5 * kInstructionSize; |
744 static const int kCodeAgeStubEntryOffset = 3 * kInstructionSize; | 744 static const int kCodeAgeStubEntryOffset = 3 * kInstructionSize; |
745 | 745 |
746 | 746 |
747 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) { | 747 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) { |
748 UNREACHABLE(); // This should never be reached on A64. | 748 UNREACHABLE(); // This should never be reached on ARM64. |
749 return Handle<Object>(); | 749 return Handle<Object>(); |
750 } | 750 } |
751 | 751 |
752 | 752 |
753 Code* RelocInfo::code_age_stub() { | 753 Code* RelocInfo::code_age_stub() { |
754 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE); | 754 ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE); |
755 ASSERT(!Code::IsYoungSequence(pc_)); | 755 ASSERT(!Code::IsYoungSequence(pc_)); |
756 // Read the stub entry point from the code age sequence. | 756 // Read the stub entry point from the code age sequence. |
757 Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset; | 757 Address stub_entry_address = pc_ + kCodeAgeStubEntryOffset; |
758 return Code::GetCodeFromTargetAddress(Memory::Address_at(stub_entry_address)); | 758 return Code::GetCodeFromTargetAddress(Memory::Address_at(stub_entry_address)); |
(...skipping 37 matching lines...)
796 IsRuntimeEntry(rmode_) || | 796 IsRuntimeEntry(rmode_) || |
797 IsExternalReference(rmode_)); | 797 IsExternalReference(rmode_)); |
798 Assembler::set_target_address_at(pc_, host_, NULL); | 798 Assembler::set_target_address_at(pc_, host_, NULL); |
799 } | 799 } |
800 | 800 |
801 | 801 |
802 bool RelocInfo::IsPatchedReturnSequence() { | 802 bool RelocInfo::IsPatchedReturnSequence() { |
803 // The sequence must be: | 803 // The sequence must be: |
804 // ldr ip0, [pc, #offset] | 804 // ldr ip0, [pc, #offset] |
805 // blr ip0 | 805 // blr ip0 |
806 // See a64/debug-a64.cc BreakLocationIterator::SetDebugBreakAtReturn(). | 806 // See arm64/debug-arm64.cc BreakLocationIterator::SetDebugBreakAtReturn(). |
807 Instruction* i1 = reinterpret_cast<Instruction*>(pc_); | 807 Instruction* i1 = reinterpret_cast<Instruction*>(pc_); |
808 Instruction* i2 = i1->following(); | 808 Instruction* i2 = i1->following(); |
809 return i1->IsLdrLiteralX() && (i1->Rt() == ip0.code()) && | 809 return i1->IsLdrLiteralX() && (i1->Rt() == ip0.code()) && |
810 i2->IsBranchAndLinkToRegister() && (i2->Rn() == ip0.code()); | 810 i2->IsBranchAndLinkToRegister() && (i2->Rn() == ip0.code()); |
811 } | 811 } |
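[Reviewer sketch, not part of this CL: a standalone check of the same two-instruction pattern over raw 32-bit words, rather than through the Instruction helpers used above. The bit encodings (LDR literal, BLR) and the choice of x16 for ip0 are stated here as assumptions for illustration.]

// Standalone sketch (not part of the CL): does a pair of raw instruction
// words look like "ldr x16, <literal>" followed by "blr x16"?
#include <cstdint>
#include <cstdio>

bool LooksLikePatchedReturnSequence(uint32_t first, uint32_t second) {
  const uint32_t kIp0Code = 16;  // ip0 is x16 on ARM64 (assumption)
  // LDR (literal, 64-bit): top 8 bits 0x58, Rt in bits [4:0].
  bool first_is_ldr_lit_x_ip0 =
      ((first >> 24) == 0x58u) && ((first & 0x1Fu) == kIp0Code);
  // BLR Xn: fixed bits 0xD63F0000, Rn in bits [9:5].
  bool second_is_blr_ip0 =
      ((second & 0xFFFFFC1Fu) == 0xD63F0000u) &&
      (((second >> 5) & 0x1Fu) == kIp0Code);
  return first_is_ldr_lit_x_ip0 && second_is_blr_ip0;
}

int main() {
  uint32_t ldr_x16_lit = 0x58000050u;  // ldr x16, pc+8
  uint32_t blr_x16 = 0xD63F0200u;      // blr x16
  std::printf("%d\n", LooksLikePatchedReturnSequence(ldr_x16_lit, blr_x16));  // 1
  return 0;
}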
812 | 812 |
813 | 813 |
814 bool RelocInfo::IsPatchedDebugBreakSlotSequence() { | 814 bool RelocInfo::IsPatchedDebugBreakSlotSequence() { |
815 Instruction* current_instr = reinterpret_cast<Instruction*>(pc_); | 815 Instruction* current_instr = reinterpret_cast<Instruction*>(pc_); |
816 return !current_instr->IsNop(Assembler::DEBUG_BREAK_NOP); | 816 return !current_instr->IsNop(Assembler::DEBUG_BREAK_NOP); |
(...skipping 404 matching lines...)
1221 } | 1221 } |
1222 | 1222 |
1223 | 1223 |
1224 void Assembler::ClearRecordedAstId() { | 1224 void Assembler::ClearRecordedAstId() { |
1225 recorded_ast_id_ = TypeFeedbackId::None(); | 1225 recorded_ast_id_ = TypeFeedbackId::None(); |
1226 } | 1226 } |
1227 | 1227 |
1228 | 1228 |
1229 } } // namespace v8::internal | 1229 } } // namespace v8::internal |
1230 | 1230 |
1231 #endif // V8_A64_ASSEMBLER_A64_INL_H_ | 1231 #endif // V8_ARM64_ASSEMBLER_ARM64_INL_H_ |