// Copyright 2011-2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_SH4_ASSEMBLER_SH4_INL_H_
#define V8_SH4_ASSEMBLER_SH4_INL_H_

#include "sh4/assembler-sh4.h"
#include "sh4/checks-sh4.h"
#include "cpu.h"
#include "debug.h"

namespace v8 {
namespace internal {

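// Ensure the emit buffer has room for the next instruction; grow it
// when no more than kGap bytes of headroom remain.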
void Assembler::CheckBuffer() {
  if (buffer_space() <= kGap) {
    GrowBuffer();
  }
  // FIXME(STM): check if we must emit the constant pool.
}


// The modes possibly affected by apply must be in kApplyMask.
void RelocInfo::apply(intptr_t delta) {
  if (RelocInfo::IsInternalReference(rmode_)) {
    // Absolute code pointers inside code objects move with the code object.
    int32_t* p = reinterpret_cast<int32_t*>(pc_);
    *p += delta;  // Relocate entry.
  }
  // We do not use pc-relative addressing on SH4, so there is
  // nothing else to do.
}


Address RelocInfo::target_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  return Assembler::target_address_at(pc_);
}


Address RelocInfo::target_address_address() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY
         || rmode_ == EMBEDDED_OBJECT
         || rmode_ == EXTERNAL_REFERENCE);
  return reinterpret_cast<Address>(Assembler::target_pointer_address_at(pc_));
}


int RelocInfo::target_address_size() {
  return kPointerSize;
}


void RelocInfo::set_target_address(Address target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == RUNTIME_ENTRY);
  Assembler::set_target_address_at(pc_, target);
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL && IsCodeTarget(rmode_)) {
    Object* target_code = Code::GetCodeFromTargetAddress(target);
    host()->GetHeap()->incremental_marking()->RecordWriteIntoCode(
        host(), this, HeapObject::cast(target_code));
  }
}


Object* RelocInfo::target_object() {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
}


Handle<Object> RelocInfo::target_object_handle(Assembler* origin) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  return Memory::Object_Handle_at(Assembler::target_pointer_address_at(pc_));
}


Object** RelocInfo::target_object_address() {
  // Provide a "natural pointer" to the embedded object,
  // which can be dereferenced during heap iteration.
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  reconstructed_obj_ptr_ =
      reinterpret_cast<Object*>(Assembler::target_pointer_at(pc_));
  return &reconstructed_obj_ptr_;
}


void RelocInfo::set_target_object(Object* target, WriteBarrierMode mode) {
  ASSERT(IsCodeTarget(rmode_) || rmode_ == EMBEDDED_OBJECT);
  Assembler::set_target_pointer_at(pc_, reinterpret_cast<Address>(target));
  if (mode == UPDATE_WRITE_BARRIER &&
      host() != NULL &&
      target->IsHeapObject()) {
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), &Memory::Object_at(pc_), HeapObject::cast(target));
  }
}


Address* RelocInfo::target_reference_address() {
  ASSERT(rmode_ == EXTERNAL_REFERENCE);
  reconstructed_adr_ptr_ = Assembler::target_address_at(pc_);
  return &reconstructed_adr_ptr_;
}


Handle<JSGlobalPropertyCell> RelocInfo::target_cell_handle() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = Memory::Address_at(pc_);
  return Handle<JSGlobalPropertyCell>(
      reinterpret_cast<JSGlobalPropertyCell**>(address));
}


JSGlobalPropertyCell* RelocInfo::target_cell() {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  return JSGlobalPropertyCell::FromValueAddress(Memory::Address_at(pc_));
}


void RelocInfo::set_target_cell(JSGlobalPropertyCell* cell,
                                WriteBarrierMode mode) {
  ASSERT(rmode_ == RelocInfo::GLOBAL_PROPERTY_CELL);
  Address address = cell->address() + JSGlobalPropertyCell::kValueOffset;
  Memory::Address_at(pc_) = address;
  if (mode == UPDATE_WRITE_BARRIER && host() != NULL) {
    // TODO(1550): We pass NULL as the slot because the cell can never be
    // an evacuation candidate.
    host()->GetHeap()->incremental_marking()->RecordWrite(
        host(), NULL, cell);
  }
  CPU::FlushICache(pc_, sizeof(Address));
}


Address RelocInfo::call_address() {
  UNIMPLEMENTED();
  return NULL;
}


void RelocInfo::set_call_address(Address target) {
  UNIMPLEMENTED();
}


Object* RelocInfo::call_object() {
  return *call_object_address();
}


void RelocInfo::set_call_object(Object* target) {
  *call_object_address() = target;
}


Object** RelocInfo::call_object_address() {
  UNIMPLEMENTED();
  return NULL;
}


bool RelocInfo::IsPatchedReturnSequence() {
  UNIMPLEMENTED();
  return false;
}


bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
  UNIMPLEMENTED();
  return false;
}


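// Dispatch to the visitor callback matching this relocation mode. For
// modes whose payload lives in the instruction stream, the icache is
// flushed in case the visitor rewrote it.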
void RelocInfo::Visit(ObjectVisitor* visitor) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    visitor->VisitEmbeddedPointer(this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    visitor->VisitCodeTarget(this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    visitor->VisitGlobalPropertyCell(this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    visitor->VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  // TODO(isolates): Get a cached isolate below.
  } else if (((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence())) &&
             Isolate::Current()->debug()->has_break_points()) {
    visitor->VisitDebugTarget(this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    visitor->VisitRuntimeEntry(this);
  }
}


template<typename StaticVisitor>
void RelocInfo::Visit(Heap* heap) {
  RelocInfo::Mode mode = rmode();
  if (mode == RelocInfo::EMBEDDED_OBJECT) {
    StaticVisitor::VisitEmbeddedPointer(heap, this);
    CPU::FlushICache(pc_, sizeof(Address));
  } else if (RelocInfo::IsCodeTarget(mode)) {
    StaticVisitor::VisitCodeTarget(heap, this);
  } else if (mode == RelocInfo::GLOBAL_PROPERTY_CELL) {
    StaticVisitor::VisitGlobalPropertyCell(heap, this);
  } else if (mode == RelocInfo::EXTERNAL_REFERENCE) {
    StaticVisitor::VisitExternalReference(this);
    CPU::FlushICache(pc_, sizeof(Address));
#ifdef ENABLE_DEBUGGER_SUPPORT
  } else if (heap->isolate()->debug()->has_break_points() &&
             ((RelocInfo::IsJSReturn(mode) &&
               IsPatchedReturnSequence()) ||
              (RelocInfo::IsDebugBreakSlot(mode) &&
               IsPatchedDebugBreakSlotSequence()))) {
    StaticVisitor::VisitDebugTarget(heap, this);
#endif
  } else if (mode == RelocInfo::RUNTIME_ENTRY) {
    StaticVisitor::VisitRuntimeEntry(this);
  }
}


Operand::Operand(int32_t immediate, RelocInfo::Mode rmode) {
  imm32_ = immediate;
  rmode_ = rmode;
}


Operand::Operand(const ExternalReference& f) {
  imm32_ = reinterpret_cast<int32_t>(f.address());
  rmode_ = RelocInfo::EXTERNAL_REFERENCE;
}


Operand::Operand(Smi* value) {
  imm32_ = reinterpret_cast<intptr_t>(value);
  rmode_ = RelocInfo::NONE;
}


MemOperand::MemOperand(Register Rx, int32_t offset, AddrMode mode) {
  rm_ = Rx;
  rn_ = no_reg;
  offset_ = offset;
  mode_ = mode;
}


MemOperand::MemOperand(Register Rd, Register offset) {
  rm_ = Rd;
  rn_ = offset;
  offset_ = 0;
  mode_ = Offset;
}


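// A worked example of the sequence handled below, assuming 2-byte SH4
// instructions (kInstrSize == 2):
//   pc + 0: movl @(disp,PC), Rx   ; loads pool[0] into Rx
//   pc + 2: nop
//   pc + 4: bra                   ; branches past the pool data
//   pc + 6: nop                   ; delay slot
//   pc + 8: pool[0]               ; == pc + 4 * kInstrSize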
Address Assembler::target_pointer_address_at(Address pc) {
  // Compute the actual address in the code where the target address used
  // by the jump/call/mov instruction is stored, given the instruction pc.
  // Refer to the functions that call Assembler::RecordRelocInfo(),
  // such as Assembler::mov(), Assembler::jmp() and Assembler::jsr().

  // All jmp/jsr/mov sequences use the same layout as mov(), i.e.:
  // align 4;
  // movl pc+4 => R; nop; bra pc+4; nop; pool[0..32]
  // We compute the address of pool[0] given the pc address after the align.
  Address pool_address = pc;
  ASSERT(IsMovlPcRelative(instr_at(pc)));  // Check for 'movl @(disp,PC)'.
  ASSERT(reinterpret_cast<uint32_t>(pc) % 4 == 0);  // Check the alignment.
  pool_address += 4 * kInstrSize;
  return pool_address;
}


Address Assembler::target_pointer_at(Address pc) {
  return Memory::Address_at(target_pointer_address_at(pc));
}


Address Assembler::target_address_from_return_address(Address pc) {
  // Returns the address of the call target from the return address that will
  // be returned to after a call.
  UNIMPLEMENTED();
  return NULL;
}


Address Assembler::target_address_at(Address pc) {
  return target_pointer_at(pc);
}


void Assembler::set_target_pointer_at(Address pc, Address target) {
  Memory::Address_at(target_pointer_address_at(pc)) = target;
  // Intuitively, we would think it is necessary to always flush the
  // instruction cache after patching a target address in the code as follows:
  //   CPU::FlushICache(pc, sizeof(target));
  // However, on SH4, no instruction is actually patched in the case
  // of embedded constants.
}


void Assembler::set_target_address_at(Address pc, Address target) {
  set_target_pointer_at(pc, target);
}


Address Assembler::return_address_from_call_start(Address pc) {
  UNIMPLEMENTED();
  return NULL;
}


// Pad with nops until pc_ is 4-byte aligned; returns the number of nops.
int Assembler::align() {
  int count = 0;
  while ((reinterpret_cast<uintptr_t>(pc_) & 0x3) != 0) {
    nop_();
    count++;
  }
  return count;
}


// Pad with nops until pc_ % 4 == 2; returns the number of nops.
int Assembler::misalign() {
  int count = 0;
  while ((reinterpret_cast<uintptr_t>(pc_) & 0x3) != 2) {
    nop_();
    count++;
  }
  return count;
}


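// SH4 only provides cmp/eq, cmp/gt and cmp/ge (plus the unsigned cmp/hi
// and cmp/hs), each setting the T bit. The conditions ne, lt and le are
// synthesized by emitting the inverse comparison and flipping the
// condition to test: on return, *cond holds the condition (eq or ne)
// that the caller must actually test.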
void Assembler::cmp(Condition* cond, Register Rd, Register Rs) {
  Condition cond_to_test = eq;
  switch (*cond) {
    case ne:
      cond_to_test = ne;
      // Fall through.
    case eq:
      cmpeq(Rd, Rs);
      break;

    case lt:
      cond_to_test = ne;
      // Fall through.
    case ge:
      cmpge(Rd, Rs);
      break;

    case le:
      cond_to_test = ne;
      // Fall through.
    case gt:
      cmpgt(Rd, Rs);
      break;
    default:
      UNREACHABLE();
  }
  *cond = cond_to_test;
}


void Assembler::cmpeq(Register Rd, const Operand& imm, Register rtmp) {
  if (Rd.is(r0) && FITS_SH4_cmpeq_imm_R0(imm.imm32_)) {
    cmpeq_imm_R0_(imm.imm32_);
  } else {
    mov(rtmp, imm);
    cmpeq_(rtmp, Rd);
  }
}


void Assembler::cmpgt(Register Rd, const Operand& imm, Register rtmp) {
  mov(rtmp, imm);
  cmpgt_(rtmp, Rd);
}


void Assembler::cmpge(Register Rd, const Operand& imm, Register rtmp) {
  mov(rtmp, imm);
  cmpge_(rtmp, Rd);
}


void Assembler::cmphi(Register Rd, const Operand& imm, Register rtmp) {
  mov(rtmp, imm);
  cmphi_(rtmp, Rd);
}


void Assembler::cmphs(Register Rd, const Operand& imm, Register rtmp) {
  mov(rtmp, imm);
  cmphs_(rtmp, Rd);
}


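// Emit a single 16-bit instruction, growing the buffer first if needed.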
void Assembler::emit(Instr x) {
  CheckBuffer();
  *reinterpret_cast<uint16_t*>(pc_) = x;
  pc_ += sizeof(uint16_t);
}


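// Reverse subtract (Rd = operand - Rs), analogous to the ARM rsb
// instruction; the zero-immediate case reduces to a plain negation.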
void Assembler::rsb(Register Rd, Register Rs, const Operand& imm,
                    Register rtmp) {
  if (imm.imm32_ == 0 && imm.rmode_ == RelocInfo::NONE) {
    neg_(Rs, Rd);
  } else {
    mov(rtmp, imm);
    sub(Rd, rtmp, Rs);
  }
}


void Assembler::rsb(Register Rd, Register Rs, Register Rt) {
  sub(Rd, Rt, Rs);
}


void Assembler::rsb(Register Rd, Register Rs, const Operand& imm,
                    Condition cond, Register rtmp) {
  ASSERT(cond == ne || cond == eq);
  if (imm.imm32_ == 0 && imm.rmode_ == RelocInfo::NONE) {
    if (cond == eq)
      bf_(0);  // Jump after the sequence if the T bit is false.
    else
      bt_(0);  // Jump after the sequence if the T bit is true.
    neg_(Rs, Rd);
  } else {
    Label end;
    if (cond == eq)
      bf_near(&end);  // Jump after the sequence if the T bit is false.
    else
      bt_near(&end);  // Jump after the sequence if the T bit is true.
    rsb(Rd, Rs, imm, rtmp);
    bind(&end);
  }
}


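// rts is a delayed branch on SH4: the trailing nop fills its delay slot.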
void Assembler::rts() {
  rts_();
  nop_();
}


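// SH4 has no pre/post-indexed addressing with writeback and an arbitrary
// displacement, so ldr/str emulate the ARM-style PreIndex/PostIndex modes
// by updating the base register explicitly around a plain load/store.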
void Assembler::ldr(Register Rd, const MemOperand& src, Register rtmp) {
  switch (src.mode_) {
    case PreIndex:
      add(src.rm_, src.rm_, Operand(src.offset()), rtmp);
      mov(Rd, MemOperand(src.rm_, 0), rtmp);
      break;
    case PostIndex:
      mov(Rd, MemOperand(src.rm_, 0), rtmp);
      add(src.rm_, src.rm_, Operand(src.offset()), rtmp);
      break;
    case Offset:
      mov(Rd, src, rtmp);
      break;
  }
}


void Assembler::str(Register Rs, const MemOperand& dst, Register rtmp) {
  switch (dst.mode_) {
    case PreIndex:
      add(dst.rm_, dst.rm_, Operand(dst.offset()), rtmp);
      mov(MemOperand(dst.rm_, 0), Rs, rtmp);
      break;
    case PostIndex:
      mov(MemOperand(dst.rm_, 0), Rs, rtmp);
      add(dst.rm_, dst.rm_, Operand(dst.offset()), rtmp);
      break;
    case Offset:
      mov(dst, Rs, rtmp);
      break;
  }
}

} }  // namespace v8::internal

#endif  // V8_SH4_ASSEMBLER_SH4_INL_H_