OLD | NEW |
1 // Copyright 2013 the V8 project authors. All rights reserved. | 1 // Copyright 2013 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #ifndef V8_ARM64_CODE_STUBS_ARM64_H_ | 5 #ifndef V8_ARM64_CODE_STUBS_ARM64_H_ |
6 #define V8_ARM64_CODE_STUBS_ARM64_H_ | 6 #define V8_ARM64_CODE_STUBS_ARM64_H_ |
7 | 7 |
8 namespace v8 { | 8 namespace v8 { |
9 namespace internal { | 9 namespace internal { |
10 | 10 |
(...skipping 112 matching lines...)
123 | 123 |
124 // We patch the first two instructions of the stub back and forth between an | 124 // We patch the first two instructions of the stub back and forth between an |
125 // adr and branch when we start and stop incremental heap marking. | 125 // adr and branch when we start and stop incremental heap marking. |
126 // The branch is | 126 // The branch is |
127 // b label | 127 // b label |
128 // The adr is | 128 // The adr is |
129 // adr xzr label | 129 // adr xzr label |
130 // so effectively a nop. | 130 // so effectively a nop. |
131 static void Patch(Code* stub, Mode mode) { | 131 static void Patch(Code* stub, Mode mode) { |
132 // We are going to patch the first two instructions of the stub. | 132 // We are going to patch the first two instructions of the stub. |
133 PatchingAssembler patcher( | 133 PatchingAssembler patcher(stub->GetIsolate(), stub->instruction_start(), 2); |
134 stub->GetIsolate(), | |
135 reinterpret_cast<Instruction*>(stub->instruction_start()), 2); | |
136 Instruction* instr1 = patcher.InstructionAt(0); | 134 Instruction* instr1 = patcher.InstructionAt(0); |
137 Instruction* instr2 = patcher.InstructionAt(kInstructionSize); | 135 Instruction* instr2 = patcher.InstructionAt(kInstructionSize); |
138 // Instructions must be either 'adr' or 'b'. | 136 // Instructions must be either 'adr' or 'b'. |
139 DCHECK(instr1->IsPCRelAddressing() || instr1->IsUncondBranchImm()); | 137 DCHECK(instr1->IsPCRelAddressing() || instr1->IsUncondBranchImm()); |
140 DCHECK(instr2->IsPCRelAddressing() || instr2->IsUncondBranchImm()); | 138 DCHECK(instr2->IsPCRelAddressing() || instr2->IsUncondBranchImm()); |
141 // Retrieve the offsets to the labels. | 139 // Retrieve the offsets to the labels. |
142 auto offset_to_incremental_noncompacting = | 140 auto offset_to_incremental_noncompacting = |
143 static_cast<int32_t>(instr1->ImmPCOffset()); | 141 static_cast<int32_t>(instr1->ImmPCOffset()); |
144 auto offset_to_incremental_compacting = | 142 auto offset_to_incremental_compacting = |
145 static_cast<int32_t>(instr2->ImmPCOffset()); | 143 static_cast<int32_t>(instr2->ImmPCOffset()); |
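Note on the patching scheme above: the stub's first two instructions toggle between an unconditional branch and an adr whose destination is xzr, so the adr form behaves as a nop while still carrying the PC-relative offset that ImmPCOffset() reads back. A minimal, self-contained sketch of the two A64 encodings involved, assuming only the standard ARMv8 encodings of B and ADR; the EncodeB/EncodeAdrXzr helpers are hypothetical and are not part of V8's PatchingAssembler API:

#include <cassert>
#include <cstdint>

// "b label": opcode 0b000101 in bits [31:26], imm26 = byte offset / 4.
uint32_t EncodeB(int32_t byte_offset) {
  assert((byte_offset & 3) == 0);
  return 0x14000000u | ((static_cast<uint32_t>(byte_offset) >> 2) & 0x03FFFFFFu);
}

// "adr xzr, label": Rd = 31 (xzr) discards the computed address, so the
// instruction is an effective nop, yet the 21-bit offset is still encoded
// in the instruction word and can be recovered later.
uint32_t EncodeAdrXzr(int32_t byte_offset) {
  uint32_t imm = static_cast<uint32_t>(byte_offset) & 0x1FFFFFu;  // 21-bit signed immediate
  return 0x10000000u | ((imm & 3u) << 29) | ((imm >> 2) << 5) | 31u;
}

Because both forms are a single 4-byte word pointing at the same label, Patch() only has to rewrite two words to start or stop incremental marking. The diff itself simply drops the reinterpret_cast when constructing the PatchingAssembler, presumably because the constructor now accepts the byte pointer returned by instruction_start() directly.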
(...skipping 196 matching lines...)
342 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; | 340 class LookupModeBits: public BitField<LookupMode, 0, 1> {}; |
343 | 341 |
344 DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR(); | 342 DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR(); |
345 DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub); | 343 DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub); |
346 }; | 344 }; |
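The LookupModeBits declaration above uses V8's BitField template to pack a one-bit LookupMode into the stub's minor key. A minimal, self-contained sketch of the bit-field idea (a hypothetical BitFieldSketch, not V8's actual BitField class):

#include <cstdint>

// Packs a value of enum/integral type T into |size| bits of a 32-bit key,
// starting at bit |shift|.
template <typename T, int shift, int size>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << size) - 1u) << shift;
  static uint32_t encode(T value) {
    return (static_cast<uint32_t>(value) << shift) & kMask;
  }
  static T decode(uint32_t key) {
    return static_cast<T>((key & kMask) >> shift);
  }
};

// For LookupModeBits the parameters are <LookupMode, 0, 1>: the mode occupies
// bit 0 of the key, leaving the remaining bits free for other stub fields.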
347 | 345 |
348 } // namespace internal | 346 } // namespace internal |
349 } // namespace v8 | 347 } // namespace v8 |
350 | 348 |
351 #endif // V8_ARM64_CODE_STUBS_ARM64_H_ | 349 #endif // V8_ARM64_CODE_STUBS_ARM64_H_ |