Chromium Code Reviews

Diff: src/ppc/assembler-ppc-inl.h

Issue 422063005: Contribution of PowerPC port. (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge
Patch Set: Created 6 years, 4 months ago
 // Copyright (c) 1994-2006 Sun Microsystems Inc.
 // All Rights Reserved.
 //
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions
 // are met:
 //
 // - Redistributions of source code must retain the above copyright notice,
 // this list of conditions and the following disclaimer.
 //
(...skipping 16 matching lines...)
 // SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 // HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 // OF THE POSSIBILITY OF SUCH DAMAGE.

 // The original source code covered by the above license has been modified
 // significantly by Google Inc.
 // Copyright 2012 the V8 project authors. All rights reserved.

-#ifndef V8_ARM_ASSEMBLER_ARM_INL_H_
-#define V8_ARM_ASSEMBLER_ARM_INL_H_
+//
+// Copyright IBM Corp. 2012, 2013. All rights reserved.
+//
+
+#ifndef V8_PPC_ASSEMBLER_PPC_INL_H_
+#define V8_PPC_ASSEMBLER_PPC_INL_H_

-#include "src/arm/assembler-arm.h"
+#include "src/ppc/assembler-ppc.h"

 #include "src/assembler.h"
 #include "src/debug.h"


 namespace v8 {
 namespace internal {


-bool CpuFeatures::SupportsCrankshaft() { return IsSupported(VFP3); }
+bool CpuFeatures::SupportsCrankshaft() { return true; }


 int Register::NumAllocatableRegisters() {
   return kMaxNumAllocatableRegisters;
 }


-int DwVfpRegister::NumRegisters() {
-  return CpuFeatures::IsSupported(VFP32DREGS) ? 32 : 16;
-}
-
-
-int DwVfpRegister::NumReservedRegisters() {
-  return kNumReservedRegisters;
-}
-
-
-int DwVfpRegister::NumAllocatableRegisters() {
-  return NumRegisters() - kNumReservedRegisters;
-}
-
-
-int DwVfpRegister::ToAllocationIndex(DwVfpRegister reg) {
-  ASSERT(!reg.is(kDoubleRegZero));
-  ASSERT(!reg.is(kScratchDoubleReg));
-  if (reg.code() > kDoubleRegZero.code()) {
-    return reg.code() - kNumReservedRegisters;
-  }
-  return reg.code();
-}
-
-
-DwVfpRegister DwVfpRegister::FromAllocationIndex(int index) {
-  ASSERT(index >= 0 && index < NumAllocatableRegisters());
-  ASSERT(kScratchDoubleReg.code() - kDoubleRegZero.code() ==
-         kNumReservedRegisters - 1);
-  if (index >= kDoubleRegZero.code()) {
-    return from_code(index + kNumReservedRegisters);
-  }
-  return from_code(index);
-}
+int DoubleRegister::NumRegisters() {
+  return kNumRegisters;
+}
+
+
+int DoubleRegister::NumAllocatableRegisters() {
+  return kMaxNumAllocatableRegisters;
+}
+
+
+int DoubleRegister::ToAllocationIndex(DoubleRegister reg) {
+  int index = reg.code() - 1;  // d0 is skipped
+  ASSERT(index < kMaxNumAllocatableRegisters);
+  ASSERT(!reg.is(kDoubleRegZero));
+  ASSERT(!reg.is(kScratchDoubleReg));
+  return index;
+}


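Note: the PPC mapping above is a plain shift-by-one, since only d0 (reserved
as kDoubleRegZero) is excluded, whereas the ARM code it replaces skips a
reserved block in the middle of the register file. A minimal standalone
sketch of the new index math, assuming register codes run d0 = 0, d1 = 1, ...
and that kScratchDoubleReg lies outside the allocatable range:

    int ToAllocationIndex(int reg_code) {
      // d0 is reserved, so allocation indices start at d1:
      // d1 -> 0, d2 -> 1, d3 -> 2, ...
      return reg_code - 1;
    }
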
 void RelocInfo::apply(intptr_t delta, ICacheFlushMode icache_flush_mode) {
+#if ABI_USES_FUNCTION_DESCRIPTORS || V8_OOL_CONSTANT_POOL
   if (RelocInfo::IsInternalReference(rmode_)) {
     // absolute code pointer inside code object moves with the code object.
-    int32_t* p = reinterpret_cast<int32_t*>(pc_);
-    *p += delta;  // relocate entry
+    Assembler::RelocateInternalReference(pc_, delta, 0, icache_flush_mode);
   }
-  // We do not use pc relative addressing on ARM, so there is
+#endif
+  // We do not use pc relative addressing on PPC, so there is
   // nothing else to do.
 }

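Note: both sides agree on the semantics here; only the mechanism differs.
ARM patched the absolute pointer inline, while PPC defers to
Assembler::RelocateInternalReference(). The underlying adjustment is the one
the removed ARM lines spell out:

    // Sketch: when a code object moves by delta bytes, an absolute
    // pointer embedded in it must move with it.
    void RelocateEntry(intptr_t* entry, intptr_t delta) {
      *entry += delta;  // new_target = old_target + (new_base - old_base)
    }
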

 Address RelocInfo::target_address() {
   ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
   return Assembler::target_address_at(pc_, host_);
 }


 Address RelocInfo::target_address_address() {
   ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_)
          || rmode_ == EMBEDDED_OBJECT
          || rmode_ == EXTERNAL_REFERENCE);
-  if (FLAG_enable_ool_constant_pool ||
-      Assembler::IsMovW(Memory::int32_at(pc_))) {
+
+#if V8_OOL_CONSTANT_POOL
+  if (Assembler::IsConstantPoolLoadStart(pc_)) {
     // We return the PC for ool constant pool since this function is used by
     // the serializer and expects the address to reside within the code object.
     return reinterpret_cast<Address>(pc_);
-  } else {
-    ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc_)));
-    return constant_pool_entry_address();
   }
+#endif
+
+  // Read the address of the word containing the target_address in an
+  // instruction stream.
+  // The only architecture-independent user of this function is the serializer.
+  // The serializer uses it to find out how many raw bytes of instruction to
+  // output before the next target.
+  // For an instruction like LIS/ORI where the target bits are mixed into the
+  // instruction bits, the size of the target will be zero, indicating that the
+  // serializer should not step forward in memory after a target is resolved
+  // and written.
+  return reinterpret_cast<Address>(pc_);
 }


 Address RelocInfo::constant_pool_entry_address() {
-  ASSERT(IsInConstantPool());
-  return Assembler::constant_pool_entry_address(pc_, host_->constant_pool());
+#if V8_OOL_CONSTANT_POOL
+  return Assembler::target_constant_pool_address_at(pc_,
+                                                    host_->constant_pool());
+#else
+  UNREACHABLE();
+  return NULL;
+#endif
 }


 int RelocInfo::target_address_size() {
-  return kPointerSize;
+  return Assembler::kSpecialTargetSize;
 }


 void RelocInfo::set_target_address(Address target,
                                    WriteBarrierMode write_barrier_mode,
                                    ICacheFlushMode icache_flush_mode) {
   ASSERT(IsCodeTarget(rmode_) || IsRuntimeEntry(rmode_));
   Assembler::set_target_address_at(pc_, host_, target, icache_flush_mode);
   if (write_barrier_mode == UPDATE_WRITE_BARRIER &&
       host() != NULL && IsCodeTarget(rmode_)) {
     Object* target_code = Code::GetCodeFromTargetAddress(target);
     host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
         host(), this, HeapObject::cast(target_code));
   }
 }


+Address Assembler::target_address_from_return_address(Address pc) {
+  // Returns the address of the call target from the return address that will
+  // be returned to after a call.
+  // Call sequence is:
+  //  mov  ip, @ call address
+  //  mtlr ip
+  //  blrl
+  //                   @ return address
+#if V8_OOL_CONSTANT_POOL
+  if (IsConstantPoolLoadEnd(pc - 3 * kInstrSize)) {
+    return pc - (kMovInstructionsConstantPool + 2) * kInstrSize;
+  }
+#endif
+  return pc - (kMovInstructionsNoConstantPool + 2) * kInstrSize;
+}
+
+
+Address Assembler::return_address_from_call_start(Address pc) {
+#if V8_OOL_CONSTANT_POOL
+  Address load_address = pc + (kMovInstructionsConstantPool - 1) * kInstrSize;
+  if (IsConstantPoolLoadEnd(load_address))
+    return pc + (kMovInstructionsConstantPool + 2) * kInstrSize;
+#endif
+  return pc + (kMovInstructionsNoConstantPool + 2) * kInstrSize;
+}


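Note: the "+ 2" in both helpers accounts for the mtlr/blrl pair that follows
the mov sequence. For illustration only, assuming kInstrSize == 4 and
kMovInstructionsNoConstantPool == 2 (the 32-bit lis/ori pair; the real
constants are defined in assembler-ppc.h):

    // Hypothetical 32-bit call-site layout under the assumptions above:
    //   call_start + 0:   lis  ip, target@ha    (mov sequence, 2 instrs)
    //   call_start + 4:   ori  ip, ip, target@l
    //   call_start + 8:   mtlr ip
    //   call_start + 12:  blrl
    //   call_start + 16:  (return address) == call_start + (2 + 2) * 4
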
 Object* RelocInfo::target_object() {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   return reinterpret_cast<Object*>(Assembler::target_address_at(pc_, host_));
 }


 Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
   ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
   return Handle<Object>(reinterpret_cast<Object**>(
       Assembler::target_address_at(pc_, host_)));
(...skipping 58 matching lines...)
   Memory::Address_at(pc_) = address;
   if (write_barrier_mode == UPDATE_WRITE_BARRIER && host() != NULL) {
     // TODO(1550) We are passing NULL as a slot because cell can never be on
     // an evacuation candidate.
     host()->GetHeap()->incremental_marking()->RecordWrite(
         host(), NULL, cell);
   }
 }


-static const int kNoCodeAgeSequenceLength = 3 * Assembler::kInstrSize;
+#if V8_OOL_CONSTANT_POOL
+static const int kNoCodeAgeInstructions = 7;
+#else
+static const int kNoCodeAgeInstructions = 6;
+#endif
+static const int kCodeAgingInstructions =
+    Assembler::kMovInstructionsNoConstantPool + 3;
+static const int kNoCodeAgeSequenceInstructions =
+    ((kNoCodeAgeInstructions >= kCodeAgingInstructions) ?
+     kNoCodeAgeInstructions : kCodeAgingInstructions);
+static const int kNoCodeAgeSequenceNops = (kNoCodeAgeSequenceInstructions -
+                                           kNoCodeAgeInstructions);
+static const int kCodeAgingSequenceNops = (kNoCodeAgeSequenceInstructions -
+                                           kCodeAgingInstructions);
+static const int kCodeAgingTargetDelta = 1 * Assembler::kInstrSize;
+static const int kCodeAgingPatchDelta = (kCodeAgingInstructions *
+                                         Assembler::kInstrSize);
+static const int kNoCodeAgeSequenceLength = (kNoCodeAgeSequenceInstructions *
+                                             Assembler::kInstrSize);


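Note: a quick sanity check of the constant arithmetic, using assumed values
(no out-of-line constant pool, kMovInstructionsNoConstantPool == 2 on 32-bit,
kInstrSize == 4; the real values come from assembler-ppc.h):

    // kNoCodeAgeInstructions         = 6
    // kCodeAgingInstructions         = 2 + 3     = 5
    // kNoCodeAgeSequenceInstructions = max(6, 5) = 6
    // kNoCodeAgeSequenceNops         = 6 - 6     = 0
    // kCodeAgingSequenceNops         = 6 - 5     = 1
    // kCodeAgingTargetDelta          = 1 * 4     = 4 bytes
    // kCodeAgingPatchDelta           = 5 * 4     = 20 bytes
    // kNoCodeAgeSequenceLength       = 6 * 4     = 24 bytes
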
 Handle<Object> RelocInfo::code_age_stub_handle(Assembler* origin) {
-  UNREACHABLE();  // This should never be reached on Arm.
+  UNREACHABLE();  // This should never be reached on PPC.
   return Handle<Object>();
 }


 Code* RelocInfo::code_age_stub() {
   ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
-  return Code::GetCodeFromTargetAddress(
-      Memory::Address_at(pc_ +
-                         (kNoCodeAgeSequenceLength - Assembler::kInstrSize)));
+  return Code::GetCodeFromTargetAddress(
+      Assembler::target_address_at(pc_ + kCodeAgingTargetDelta, host_));
 }


 void RelocInfo::set_code_age_stub(Code* stub,
                                   ICacheFlushMode icache_flush_mode) {
   ASSERT(rmode_ == RelocInfo::CODE_AGE_SEQUENCE);
-  Memory::Address_at(pc_ +
-                     (kNoCodeAgeSequenceLength - Assembler::kInstrSize)) =
-      stub->instruction_start();
+  Assembler::set_target_address_at(pc_ + kCodeAgingTargetDelta,
+                                   host_,
+                                   stub->instruction_start(),
+                                   icache_flush_mode);
 }


 Address RelocInfo::call_address() {
-  // The 2 instructions offset assumes patched debug break slot or return
-  // sequence.
   ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
          (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
-  return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
+  // The pc_ offset of 0 assumes a patched return sequence per
+  // BreakLocationIterator::SetDebugBreakAtReturn(), or a debug break
+  // slot per BreakLocationIterator::SetDebugBreakAtSlot().
+  return Assembler::target_address_at(pc_, host_);
 }


 void RelocInfo::set_call_address(Address target) {
   ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
          (IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
-  Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
+  Assembler::set_target_address_at(pc_, host_, target);
   if (host() != NULL) {
     Object* target_code = Code::GetCodeFromTargetAddress(target);
     host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
         host(), this, HeapObject::cast(target_code));
   }
 }


 Object* RelocInfo::call_object() {
   return *call_object_address();
(...skipping 15 matching lines...)
 void RelocInfo::WipeOut() {
   ASSERT(IsEmbeddedObject(rmode_) ||
          IsCodeTarget(rmode_) ||
          IsRuntimeEntry(rmode_) ||
          IsExternalReference(rmode_));
   Assembler::set_target_address_at(pc_, host_, NULL);
 }


 bool RelocInfo::IsPatchedReturnSequence() {
-  Instr current_instr = Assembler::instr_at(pc_);
-  Instr next_instr = Assembler::instr_at(pc_ + Assembler::kInstrSize);
-  // A patched return sequence is:
-  //  ldr ip, [pc, #0]
-  //  blx ip
-  return Assembler::IsLdrPcImmediateOffset(current_instr) &&
-         Assembler::IsBlxReg(next_instr);
+  // The patched return sequence is defined by
+  // BreakLocationIterator::SetDebugBreakAtReturn() (FIXED_SEQUENCE).
+  Instr instr0 = Assembler::instr_at(pc_);
+  Instr instr1 = Assembler::instr_at(pc_ + 1 * Assembler::kInstrSize);
+#if V8_TARGET_ARCH_PPC64
+  Instr instr3 = Assembler::instr_at(pc_ + (3 * Assembler::kInstrSize));
+  Instr instr4 = Assembler::instr_at(pc_ + (4 * Assembler::kInstrSize));
+  Instr binstr = Assembler::instr_at(pc_ + (7 * Assembler::kInstrSize));
+#else
+  Instr binstr = Assembler::instr_at(pc_ + 4 * Assembler::kInstrSize);
+#endif
+  bool patched_return = ((instr0 & kOpcodeMask) == ADDIS &&
+                         (instr1 & kOpcodeMask) == ORI &&
+#if V8_TARGET_ARCH_PPC64
+                         (instr3 & kOpcodeMask) == ORIS &&
+                         (instr4 & kOpcodeMask) == ORI &&
+#endif
+                         (binstr == 0x7d821008));  // twge r2, r2
+  return patched_return;
 }


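Note: the mask comparisons work because a PPC instruction carries its primary
opcode in the top 6 bits, so instr & kOpcodeMask strips away the register and
immediate fields. A standalone sketch, with the mask and opcode values assumed
from the Power ISA (the port's real definitions live in its constants header):

    const uint32_t kOpcodeMaskSketch = 0x3Fu << 26;  // primary opcode bits

    bool LooksLikeLisOri(uint32_t instr0, uint32_t instr1) {
      // lis is addis with ra == 0; addis has primary opcode 15, ori has 24.
      return (instr0 & kOpcodeMaskSketch) == (15u << 26) &&
             (instr1 & kOpcodeMaskSketch) == (24u << 26);
    }
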
 bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
   Instr current_instr = Assembler::instr_at(pc_);
   return !Assembler::IsNop(current_instr, Assembler::DEBUG_BREAK_NOP);
 }


 void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
   RelocInfo::Mode mode = rmode();
   if (mode == RelocInfo::EMBEDDED_OBJECT) {
     visitor->VisitEmbeddedPointer(this);
   } else if (RelocInfo::IsCodeTarget(mode)) {
     visitor->VisitCodeTarget(this);
   } else if (mode == RelocInfo::CELL) {
     visitor->VisitCell(this);
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     visitor->VisitExternalReference(this);
   } else if (RelocInfo::IsCodeAgeSequence(mode)) {
     visitor->VisitCodeAgeSequence(this);
   } else if (((RelocInfo::IsJSReturn(mode) &&
                IsPatchedReturnSequence()) ||
               (RelocInfo::IsDebugBreakSlot(mode) &&
                IsPatchedDebugBreakSlotSequence())) &&
              isolate->debug()->has_break_points()) {
     visitor->VisitDebugTarget(this);
-  } else if (RelocInfo::IsRuntimeEntry(mode)) {
+  } else if (IsRuntimeEntry(mode)) {
     visitor->VisitRuntimeEntry(this);
   }
 }


 template<typename StaticVisitor>
 void RelocInfo::Visit(Heap* heap) {
   RelocInfo::Mode mode = rmode();
   if (mode == RelocInfo::EMBEDDED_OBJECT) {
     StaticVisitor::VisitEmbeddedPointer(heap, this);
   } else if (RelocInfo::IsCodeTarget(mode)) {
     StaticVisitor::VisitCodeTarget(heap, this);
   } else if (mode == RelocInfo::CELL) {
     StaticVisitor::VisitCell(heap, this);
   } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
     StaticVisitor::VisitExternalReference(this);
   } else if (RelocInfo::IsCodeAgeSequence(mode)) {
     StaticVisitor::VisitCodeAgeSequence(heap, this);
   } else if (heap->isolate()->debug()->has_break_points() &&
              ((RelocInfo::IsJSReturn(mode) &&
                IsPatchedReturnSequence()) ||
               (RelocInfo::IsDebugBreakSlot(mode) &&
                IsPatchedDebugBreakSlotSequence()))) {
     StaticVisitor::VisitDebugTarget(heap, this);
-  } else if (RelocInfo::IsRuntimeEntry(mode)) {
+  } else if (IsRuntimeEntry(mode)) {
     StaticVisitor::VisitRuntimeEntry(this);
   }
 }

-
-Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
+Operand::Operand(intptr_t immediate, RelocInfo::Mode rmode) {
   rm_ = no_reg;
-  imm32_ = immediate;
+  imm_ = immediate;
   rmode_ = rmode;
 }

-
 Operand::Operand(const ExternalReference& f) {
   rm_ = no_reg;
-  imm32_ = reinterpret_cast<int32_t>(f.address());
+  imm_ = reinterpret_cast<intptr_t>(f.address());
   rmode_ = RelocInfo::EXTERNAL_REFERENCE;
 }

-
 Operand::Operand(Smi* value) {
   rm_ = no_reg;
-  imm32_ = reinterpret_cast<intptr_t>(value);
-  rmode_ = RelocInfo::NONE32;
+  imm_ = reinterpret_cast<intptr_t>(value);
+  rmode_ = kRelocInfo_NONEPTR;
 }

-
 Operand::Operand(Register rm) {
   rm_ = rm;
-  rs_ = no_reg;
-  shift_op_ = LSL;
-  shift_imm_ = 0;
-}
-
-
-bool Operand::is_reg() const {
-  return rm_.is_valid() &&
-      rs_.is(no_reg) &&
-      shift_op_ == LSL &&
-      shift_imm_ == 0;
+  rmode_ = kRelocInfo_NONEPTR;  // PPC: why doesn't ARM do this?
 }

 void Assembler::CheckBuffer() {
   if (buffer_space() <= kGap) {
     GrowBuffer();
   }
+}
+
+
+void Assembler::CheckTrampolinePoolQuick() {
   if (pc_offset() >= next_buffer_check_) {
-    CheckConstPool(false, true);
+    CheckTrampolinePool();
   }
 }

 void Assembler::emit(Instr x) {
   CheckBuffer();
   *reinterpret_cast<Instr*>(pc_) = x;
   pc_ += kInstrSize;
+  CheckTrampolinePoolQuick();
 }

-Address Assembler::target_address_from_return_address(Address pc) {
-  // Returns the address of the call target from the return address that will
-  // be returned to after a call.
-  // Call sequence on V7 or later is:
-  //  movw  ip, #... @ call address low 16
-  //  movt  ip, #... @ call address high 16
-  //  blx   ip
-  //                      @ return address
-  // Or pre-V7 or cases that need frequent patching, the address is in the
-  // constant pool.  It could be a small constant pool load:
-  //  ldr   ip, [pc / pp, #...] @ call address
-  //  blx   ip
-  //                      @ return address
-  // Or an extended constant pool load:
-  //  movw  ip, #...
-  //  movt  ip, #...
-  //  ldr   ip, [pc, ip] @ call address
-  //  blx   ip
-  //                      @ return address
-  Address candidate = pc - 2 * Assembler::kInstrSize;
-  Instr candidate_instr(Memory::int32_at(candidate));
-  if (IsLdrPcImmediateOffset(candidate_instr) |
-      IsLdrPpImmediateOffset(candidate_instr)) {
-    return candidate;
-  } else if (IsLdrPpRegOffset(candidate_instr)) {
-    candidate = pc - 4 * Assembler::kInstrSize;
-    ASSERT(IsMovW(Memory::int32_at(candidate)) &&
-           IsMovT(Memory::int32_at(candidate + Assembler::kInstrSize)));
-    return candidate;
-  } else {
-    candidate = pc - 3 * Assembler::kInstrSize;
-    ASSERT(IsMovW(Memory::int32_at(candidate)) &&
-           IsMovT(Memory::int32_at(candidate + kInstrSize)));
-    return candidate;
-  }
-}
-
-
-Address Assembler::return_address_from_call_start(Address pc) {
-  if (IsLdrPcImmediateOffset(Memory::int32_at(pc)) |
-      IsLdrPpImmediateOffset(Memory::int32_at(pc))) {
-    // Load from constant pool, small section.
-    return pc + kInstrSize * 2;
-  } else {
-    ASSERT(IsMovW(Memory::int32_at(pc)));
-    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
-    if (IsLdrPpRegOffset(Memory::int32_at(pc + kInstrSize))) {
-      // Load from constant pool, extended section.
-      return pc + kInstrSize * 4;
-    } else {
-      // A movw / movt load immediate.
-      return pc + kInstrSize * 3;
-    }
-  }
-}
-
-
-void Assembler::deserialization_set_special_target_at(
-    Address constant_pool_entry, Code* code, Address target) {
-  if (FLAG_enable_ool_constant_pool) {
-    set_target_address_at(constant_pool_entry, code, target);
-  } else {
-    Memory::Address_at(constant_pool_entry) = target;
-  }
-}
-
-
-bool Assembler::is_constant_pool_load(Address pc) {
-  return !Assembler::IsMovW(Memory::int32_at(pc)) ||
-      (FLAG_enable_ool_constant_pool &&
-       Assembler::IsLdrPpRegOffset(
-           Memory::int32_at(pc + 2 * Assembler::kInstrSize)));
-}
-
-
-Address Assembler::constant_pool_entry_address(
-    Address pc, ConstantPoolArray* constant_pool) {
-  if (FLAG_enable_ool_constant_pool) {
-    ASSERT(constant_pool != NULL);
-    int cp_offset;
-    if (IsMovW(Memory::int32_at(pc))) {
-      ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)) &&
-             IsLdrPpRegOffset(Memory::int32_at(pc + 2 * kInstrSize)));
-      // This is an extended constant pool lookup.
-      Instruction* movw_instr = Instruction::At(pc);
-      Instruction* movt_instr = Instruction::At(pc + kInstrSize);
-      cp_offset = (movt_instr->ImmedMovwMovtValue() << 16) |
-                  movw_instr->ImmedMovwMovtValue();
-    } else {
-      // This is a small constant pool lookup.
-      ASSERT(Assembler::IsLdrPpImmediateOffset(Memory::int32_at(pc)));
-      cp_offset = GetLdrRegisterImmediateOffset(Memory::int32_at(pc));
-    }
-    return reinterpret_cast<Address>(constant_pool) + cp_offset;
-  } else {
-    ASSERT(Assembler::IsLdrPcImmediateOffset(Memory::int32_at(pc)));
-    Instr instr = Memory::int32_at(pc);
-    return pc + GetLdrRegisterImmediateOffset(instr) + kPcLoadDelta;
-  }
-}
+bool Operand::is_reg() const {
+  return rm_.is_valid();
+}
+
+
+// Fetch the 32bit value from the FIXED_SEQUENCE lis/ori
 Address Assembler::target_address_at(Address pc,
                                      ConstantPoolArray* constant_pool) {
-  if (is_constant_pool_load(pc)) {
-    // This is a constant pool lookup. Return the value in the constant pool.
-    return Memory::Address_at(constant_pool_entry_address(pc, constant_pool));
-  } else {
-    // This is an movw_movt immediate load. Return the immediate.
-    ASSERT(IsMovW(Memory::int32_at(pc)) &&
-           IsMovT(Memory::int32_at(pc + kInstrSize)));
-    Instruction* movw_instr = Instruction::At(pc);
-    Instruction* movt_instr = Instruction::At(pc + kInstrSize);
-    return reinterpret_cast<Address>(
-        (movt_instr->ImmedMovwMovtValue() << 16) |
-        movw_instr->ImmedMovwMovtValue());
-  }
-}
+  Instr instr1 = instr_at(pc);
+  Instr instr2 = instr_at(pc + kInstrSize);
+  // Interpret 2 instructions generated by lis/ori
+  if (IsLis(instr1) && IsOri(instr2)) {
+#if V8_TARGET_ARCH_PPC64
+    Instr instr4 = instr_at(pc + (3 * kInstrSize));
+    Instr instr5 = instr_at(pc + (4 * kInstrSize));
+    // Assemble the 64 bit value.
+    uint64_t hi = (static_cast<uint32_t>((instr1 & kImm16Mask) << 16) |
+                   static_cast<uint32_t>(instr2 & kImm16Mask));
+    uint64_t lo = (static_cast<uint32_t>((instr4 & kImm16Mask) << 16) |
+                   static_cast<uint32_t>(instr5 & kImm16Mask));
+    return reinterpret_cast<Address>((hi << 32) | lo);
+#else
+    // Assemble the 32 bit value.
+    return reinterpret_cast<Address>(
+        ((instr1 & kImm16Mask) << 16) | (instr2 & kImm16Mask));
+#endif
+  }
+#if V8_OOL_CONSTANT_POOL
+  return Memory::Address_at(
+      target_constant_pool_address_at(pc, constant_pool));
+#else
+  ASSERT(false);
+  return (Address)0;
+#endif
+}

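Note: on 32-bit targets the address is recovered by concatenating the two
16-bit immediates of the fixed lis/ori sequence. A worked example, with values
chosen purely for illustration:

    // For the fixed sequence
    //   lis r5, 0x1234      -> instr1 low 16 bits == 0x1234
    //   ori r5, r5, 0x5678  -> instr2 low 16 bits == 0x5678
    // the reassembled target is:
    uint32_t target = ((instr1 & 0xFFFF) << 16) | (instr2 & 0xFFFF);
    // target == 0x12345678
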
+
+#if V8_OOL_CONSTANT_POOL
+bool Assembler::IsConstantPoolLoadStart(Address pc) {
+#if V8_TARGET_ARCH_PPC64
+  if (!IsLi(instr_at(pc))) return false;
+  pc += kInstrSize;
+#endif
+  return GetRA(instr_at(pc)).is(kConstantPoolRegister);
+}
+
+
+bool Assembler::IsConstantPoolLoadEnd(Address pc) {
+#if V8_TARGET_ARCH_PPC64
+  pc -= kInstrSize;
+#endif
+  return IsConstantPoolLoadStart(pc);
+}
+
+
+int Assembler::GetConstantPoolOffset(Address pc) {
+  ASSERT(IsConstantPoolLoadStart(pc));
+  Instr instr = instr_at(pc);
+  int offset = SIGN_EXT_IMM16((instr & kImm16Mask));
+  return offset;
+}
+
+
+void Assembler::SetConstantPoolOffset(Address pc, int offset) {
+  ASSERT(IsConstantPoolLoadStart(pc));
+  ASSERT(is_int16(offset));
+  Instr instr = instr_at(pc);
+  instr &= ~kImm16Mask;
+  instr |= (offset & kImm16Mask);
+  instr_at_put(pc, instr);
+}
+
+
+Address Assembler::target_constant_pool_address_at(
+    Address pc, ConstantPoolArray* constant_pool) {
+  Address addr = reinterpret_cast<Address>(constant_pool);
+  ASSERT(addr);
+  addr += GetConstantPoolOffset(pc);
+  return addr;
+}
+#endif

558 // This sets the branch destination (which gets loaded at the call address).
559 // This is for calls and branches within generated code. The serializer
560 // has already deserialized the mov instructions etc.
561 // There is a FIXED_SEQUENCE assumption here
562 void Assembler::deserialization_set_special_target_at(
563 Address instruction_payload, Code* code, Address target) {
564 set_target_address_at(instruction_payload, code, target);
565 }
566
567 // This code assumes the FIXED_SEQUENCE of lis/ori
 void Assembler::set_target_address_at(Address pc,
                                       ConstantPoolArray* constant_pool,
                                       Address target,
                                       ICacheFlushMode icache_flush_mode) {
-  if (is_constant_pool_load(pc)) {
-    // This is a constant pool lookup. Update the entry in the constant pool.
-    Memory::Address_at(constant_pool_entry_address(pc, constant_pool)) = target;
-    // Intuitively, we would think it is necessary to always flush the
-    // instruction cache after patching a target address in the code as
-    // follows:
-    //   CpuFeatures::FlushICache(pc, sizeof(target));
-    // However, on ARM, no instruction is actually patched in the case
-    // of embedded constants of the form:
-    //   ldr ip, [pp, #...]
-    // since the instruction accessing this address in the constant pool
-    // remains unchanged.
-  } else {
-    // This is an movw_movt immediate load. Patch the immediate embedded in
-    // the instructions.
-    ASSERT(IsMovW(Memory::int32_at(pc)));
-    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
-    uint32_t* instr_ptr = reinterpret_cast<uint32_t*>(pc);
-    uint32_t immediate = reinterpret_cast<uint32_t>(target);
-    instr_ptr[0] = PatchMovwImmediate(instr_ptr[0], immediate & 0xFFFF);
-    instr_ptr[1] = PatchMovwImmediate(instr_ptr[1], immediate >> 16);
-    ASSERT(IsMovW(Memory::int32_at(pc)));
-    ASSERT(IsMovT(Memory::int32_at(pc + kInstrSize)));
-    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
-      CpuFeatures::FlushICache(pc, 2 * kInstrSize);
-    }
-  }
-}
+  Instr instr1 = instr_at(pc);
+  Instr instr2 = instr_at(pc + kInstrSize);
+  // Interpret 2 instructions generated by lis/ori
+  if (IsLis(instr1) && IsOri(instr2)) {
+#if V8_TARGET_ARCH_PPC64
+    Instr instr4 = instr_at(pc + (3 * kInstrSize));
+    Instr instr5 = instr_at(pc + (4 * kInstrSize));
+    // Needs to be fixed up when mov changes to handle 64-bit values.
+    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
+    uintptr_t itarget = reinterpret_cast<uintptr_t>(target);
+
+    instr5 &= ~kImm16Mask;
+    instr5 |= itarget & kImm16Mask;
+    itarget = itarget >> 16;
+
+    instr4 &= ~kImm16Mask;
+    instr4 |= itarget & kImm16Mask;
+    itarget = itarget >> 16;
+
+    instr2 &= ~kImm16Mask;
+    instr2 |= itarget & kImm16Mask;
+    itarget = itarget >> 16;
+
+    instr1 &= ~kImm16Mask;
+    instr1 |= itarget & kImm16Mask;
+    itarget = itarget >> 16;
+
+    *p = instr1;
+    *(p + 1) = instr2;
+    *(p + 3) = instr4;
+    *(p + 4) = instr5;
+    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
+      CpuFeatures::FlushICache(p, 5 * kInstrSize);
+    }
+#else
+    uint32_t* p = reinterpret_cast<uint32_t*>(pc);
+    uint32_t itarget = reinterpret_cast<uint32_t>(target);
+    int lo_word = itarget & kImm16Mask;
+    int hi_word = itarget >> 16;
+    instr1 &= ~kImm16Mask;
+    instr1 |= hi_word;
+    instr2 &= ~kImm16Mask;
+    instr2 |= lo_word;
+
+    *p = instr1;
+    *(p + 1) = instr2;
+    if (icache_flush_mode != SKIP_ICACHE_FLUSH) {
+      CpuFeatures::FlushICache(p, 2 * kInstrSize);
+    }
+#endif
+  } else {
+#if V8_OOL_CONSTANT_POOL
+    Memory::Address_at(
+        target_constant_pool_address_at(pc, constant_pool)) = target;
+#else
+    UNREACHABLE();
+#endif
+  }
+}

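Note: the patching path is the mirror image of target_address_at(): the
opcode and register fields of each instruction are preserved and only the
16-bit immediate fields are rewritten, after which the instruction cache
covering the sequence must be flushed. A condensed 32-bit sketch, assuming
kImm16Mask == 0xFFFF:

    // Rewrite the immediates of a lis/ori pair in place.
    void PatchLisOri(uint32_t* p, uint32_t target) {
      p[0] = (p[0] & ~0xFFFFu) | (target >> 16);      // lis: high half
      p[1] = (p[1] & ~0xFFFFu) | (target & 0xFFFFu);  // ori: low half
      // The caller must flush the icache over p[0..1] afterwards.
    }
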
 } }  // namespace v8::internal

-#endif  // V8_ARM_ASSEMBLER_ARM_INL_H_
+#endif  // V8_PPC_ASSEMBLER_PPC_INL_H_